async def start_tictactoe_online(self, ctx, member: Member, mode: str):
    """Start an online Tic-Tac-Toe game between the author and *member*.

    Validates the invited member, waits for them to accept the invite,
    maps the requested board size to a BoardMode and launches the game.
    """
    lang = await self.bot.get_guild_bot_lang(ctx.guild_id)
    invite_content = get_content("FUNC_INVITE_TO_GAME", lang)
    game_content = get_content("GAME_TTT", lang)
    game_name = get_content("GAMES_NAMES", lang)["TTT"]

    # Players cannot invite themselves or a bot account.
    if member.id == ctx.author_id:
        return await ctx.send(invite_content["SELF_INVITE"])
    if member.bot:
        return await ctx.send(invite_content["BOT_INVITE"])

    message, accept = await self.invite_to_game(ctx, member, game_name)
    if not accept:
        return

    # Fix: the original if/elif chain left `board_mode` unbound (NameError)
    # for any unexpected `mode` value; fall back to the 3x3 board instead.
    board_modes = {"3x3": BoardMode.x3, "4x4": BoardMode.x4, "5x5": BoardMode.x5}
    board_mode = board_modes.get(mode, BoardMode.x3)

    game = TicTacToeOnline(self.bot, message, ctx, member, game_content, board_mode)
    await game.start_game()
def upload_result(result_dir=None, prefix=None, upload_code=None):
    """Upload one tuning result (summary/knobs/metrics JSON files) to the website.

    Args:
        result_dir: directory holding the result files; defaults to
            <CONTROLLER_HOME>/output.
        prefix: filename prefix of this iteration's files (e.g. '0__'); defaults to ''.
        upload_code: session upload code; defaults to dconf.UPLOAD_CODE.

    Returns:
        The requests.Response returned by the website.

    Raises:
        Exception: if the website responds with a non-200 status.
    """
    result_dir = result_dir or os.path.join(dconf.CONTROLLER_HOME, 'output')
    prefix = prefix or ''
    upload_code = upload_code or dconf.UPLOAD_CODE
    files = {}
    try:
        for base in ('summary', 'knobs', 'metrics_before', 'metrics_after'):
            fpath = os.path.join(result_dir, prefix + base + '.json')
            # Replaces the true db version with the specified version to allow for
            # testing versions not officially supported by OtterTune
            if base == 'summary' and dconf.OVERRIDE_DB_VERSION:
                with open(fpath, 'r') as f:
                    summary = json.load(f)
                summary['real_database_version'] = summary['database_version']
                summary['database_version'] = dconf.OVERRIDE_DB_VERSION
                with open(fpath, 'w') as f:
                    json.dump(summary, f, indent=1)
            files[base] = open(fpath, 'rb')
        response = requests.post(dconf.WEBSITE_URL + '/new_result/', files=files,
                                 data={'upload_code': upload_code})
        if response.status_code != 200:
            raise Exception(
                'Error uploading result.\nStatus: {}\nMessage: {}\n'.format(
                    response.status_code, get_content(response)))
    finally:
        # Fix: close the opened files even when the POST or the status check
        # raises (the original leaked the handles on any exception).
        for f in files.values():  # pylint: disable=not-an-iterable
            f.close()
    LOG.info(get_content(response))
    return response
def test_compress_decompress(teardown_env, filesize):
    """Round-trip test: compressing then decompressing must reproduce the file."""
    generate_file(filesize=filesize)
    compress('_test')
    decompress('_test_decomp')
    # The decompressed output must match the generated source byte-for-byte.
    assert get_content('_test') == get_content('_test_decomp')
def __init__(self, source_path, translated_files_path, result_folder_path):
    """Capture a source/translated file pair and their placeholder sets.

    Args:
        source_path: path of the original (source-language) file.
        translated_files_path: root directory holding the translated files.
        result_folder_path: directory where the checked result is written.
    """
    self.source_path = source_path
    self.source_name = self.source_path.name
    self.translated_files_path = translated_files_path
    # Derived from source name + translated_files_path by get_target_path().
    self.target_path = self.get_target_path()
    self.result_path = result_folder_path / self.source_name
    # Load both files and extract the placeholders to compare.
    self.source_content = utils.get_content(self.source_path)
    self.target_content = utils.get_content(self.target_path)
    self.source_placeholders = utils.get_placeholders(self.source_content)
    self.target_placeholders = utils.get_placeholders(self.target_content)
def decrypt_aidh(kwargs=None, **kw):
    """Resolve a direct download link from an aidh-style embed host.

    Expects keyword arguments: file_url (id or full URL), domain, name
    (hoster label used in logs) and token_variable (JS variable holding the
    download token). Returns the download URL, or '' on any failure.
    """
    if kwargs is not None:
        kw.update(kwargs)
    for t in ['file_url', 'domain', 'name', 'token_variable']:
        if t not in kw:
            # Fix: the original only logged the missing argument and then
            # crashed with a KeyError below; bail out early instead.
            log.error('Argument %s expected, not found' % t)
            return ''
    cont = ''
    name = kw['name']
    file_id = kw['file_url']
    domain = kw['domain']
    tvar = kw['token_variable']
    try:
        # If a full URL was passed instead of an id, extract the id from it.
        if file_id.find('http') != -1:
            file_id = utils.text_finder(file_id, 'v=', '&')
        url = 'http://embed.%s/embed.php?v=%s' % (domain, file_id)
        log.debug('[%s] Retrieving file token from "%s"' % (name, url))
        cont = utils.get_content(url)
    except BaseException as e:
        log.error('[%s] Could not get content' % name)
        log.error(e)
        return ''
    key = utils.text_finder(cont, '%s="' % tvar, '"')
    if key == '':
        log.error('[%s] Could not retrieve download token' % name)
        return ''
    api_cont = ''
    try:
        decoded_url = 'http://www.%s/api/player.api.php?file=%s&key=%s' % (domain, file_id, key)
        log.debug('[%s] Retrieving link from "%s"' % (name, decoded_url))
        api_cont = utils.get_content(decoded_url)
    except BaseException as e:
        log.error('[%s] Could not get download link' % name)
        log.error(e)
        return ''
    # The API reports failures inline ("error_msg=..." in the response body).
    if api_cont.find('error') != -1:
        if api_cont.find('error_msg') != -1:
            error_msg = utils.text_finder(api_cont, 'error_msg=', '&').strip()
            log.error('[%s] Error found: %s' % (name, error_msg))
        else:
            log.error('[%s] Error found, but no message found!' % name)
        return ''
    return utils.text_finder(api_cont, 'url=', '&')
def get_article_list(self, fout=None, order_by='added_at', max_get=1000000):
    '''Fetch this collection's article list, page by page.

    Args:
        fout: optional open file; each article's plain text is appended to it.
        order_by: one of {'added_at', 'likes_count'}.
        max_get: stop once more than this many articles have been collected.

    Returns:
        The accumulated list of raw article dicts.
    '''
    articles_list = []
    page = 1
    num_id = self.get_collection_num_id()
    coll_name = self.get_collection_name()
    logger.info(u'专题:%s' % coll_name)
    while True:
        # Paginated notes endpoint for this collection.
        url = BASE_URL + '/collections/' + str(
            num_id) + '/notes?order_by=' + order_by + '&page=' + str(page)
        page += 1
        content = get_content(url)
        page_arts = get_article(content)
        # NOTE(review): each page's article texts are written to fout before
        # the stop check below, so the final page is still dumped.
        for page_art in page_arts:
            art = Article(page_art['id'])
            title, text = art.get_article_text(delete_wrap=True)
            # Fix: idiomatic identity check instead of `fout != None`.
            if fout is not None:
                fout.write(text + '\n')
        # Stop on an empty page or once the cap has been exceeded.
        if len(page_arts) == 0 or len(articles_list) > max_get:
            logger.info(u'专题 %s 一共获取 %d 篇文章' % (coll_name, len(articles_list)))
            return articles_list
        articles_list.extend(page_arts)
        logger.info(u'已经获取了 %d 篇文章' % len(articles_list))
def __init__(self, collection_id='3sT4qY'):
    """Download a collection page by its id and parse it with BeautifulSoup."""
    self.collection_id = collection_id
    self.collectionUrl = '%s/collection/%s' % (BASE_URL, collection_id)
    self.content = get_content(self.collectionUrl)
    # get_content signals a failed request with the literal string 'FAIL'.
    if self.content == 'FAIL':
        logger.warning(u'此页面无法打开,或者此专题不存在')
    self.soup = BeautifulSoup(self.content, 'lxml')
def get_notifications(self):
    '''Fetch the user's notifications (requires login; COOKIE must be set).

    Scans the first three notification pages and classifies each entry by
    its icon class: likes ('fa-heart'), new followers ('fa-check') and new
    collection subscribers ('fa-rss-square').

    Returns:
        A list of {'time': ..., 'token': 'heart'|'check'|'square'} dicts.
    '''
    notifications_url = BASE_URL + '/notifications?all=true'
    time_list = []
    # Only the first three pages are scanned.
    for page in range(1, 4):
        url = notifications_url + '&page=' + str(page)
        content = get_content(url, cookie=COOKIE)
        soup = BeautifulSoup(content, 'lxml')
        fa_hearts = soup.find('ul', attrs={
            'class': 'unstyled'
        }).findAll('li')
        for heart in fa_hearts:
            # Someone liked your article or comment.
            if 'fa-heart' in heart.i['class']:
                time = heart.span.string
                time_list.append({'time': time, 'token': 'heart'})
                logger.debug('heart:' + time)
            # Someone followed you.
            if 'fa-check' in heart.i['class']:
                time = heart.span.string
                time_list.append({'time': time, 'token': 'check'})
                logger.debug('check:' + time)
            # Someone subscribed to your collection.
            if 'fa-rss-square' in heart.i['class']:
                time = heart.span.string
                time_list.append({'time': time, 'token': 'square'})
                logger.debug('square:' + time)
    return time_list
def get_article_list(self, order_by='lastest', get_max=1000000):
    '''Fetch the user's article list, page by page.

    Args:
        order_by: {'lastest', 'top'} — newest first or most popular first.
        get_max: stop once at least this many articles have been collected.

    Returns:
        A dict mapping article id -> title.
    '''
    article_list = {}
    page = 1
    order_url = self.top_articles if order_by == 'top' else self.homepageUrl
    while True:
        url = order_url + '?page=' + str(page)
        page += 1
        content = get_content(url)
        soup = BeautifulSoup(content, 'lxml')
        articles = soup.find('ul', attrs={
            'class': 'article-list latest-notes'
        }).findAll('li')
        # Fix: the original referenced the undefined names `max_get` (the
        # parameter is `get_max`), `art_link` and `article_links`, used the
        # removed dict.has_key(), and had an unconditional `return` that
        # stopped after the first page.
        if not articles or len(article_list) >= get_max:
            logger.info(u'一共获取了 %d 篇文章' % len(article_list))
            return article_list
        for art in articles:
            art_id = art.h4.a['href'].replace('/p/', '')
            title = art.h4.a.string
            logger.debug(art_id)
            logger.debug(title)
            if art_id not in article_list:
                article_list[art_id] = title
async def set_bot_language(self, ctx: SlashContext, language: str):
    """Persist a new bot language for this guild and confirm to the user."""
    await ctx.defer()
    guild_data = await self.bot.mongo.get_guild_data(ctx.guild_id)
    await guild_data.configuration.set_language(language)
    # Reply in the language that was just configured.
    localization = get_content("SET_LANGUAGE_COMMAND", lang=guild_data.configuration.language)
    await ctx.send(localization["LANGUAGE_CHANGED"])
async def starboard_blacklist_add(
    self,
    ctx: SlashContext,
    member: Member = None,
    role: Role = None,
    channel: TextChannel = None,
):
    """Add a member, role and/or channel to the guild's starboard blacklist.

    At least one option must be provided; entities already blacklisted are
    silently skipped.
    """
    guild_data = await self.bot.mongo.get_guild_data(ctx.guild_id)
    content = get_content("STARBOARD_FUNCTIONS", guild_data.configuration.language)
    if not member and not role and not channel:
        return await ctx.send(content["BLACKLIST_NO_OPTIONS_TEXT"], hidden=True)

    starboard_data = guild_data.starboard
    if starboard_data is None:
        return await ctx.send(content["STARBOARD_NOT_SETUP_TEXT"], hidden=True)

    blacklist = starboard_data.blacklist
    # Each entity type pairs with its blacklist key and add-coroutine.
    entries = (
        ("members", member, starboard_data.add_member_to_blacklist),
        ("roles", role, starboard_data.add_role_to_blacklist),
        ("channels", channel, starboard_data.add_channel_to_blacklist),
    )
    for key, entity, add_to_blacklist in entries:
        if entity and entity.id not in blacklist.get(key, []):
            await add_to_blacklist(entity.id)

    await ctx.send(content["BLACKLIST_ADDED_TEXT"], hidden=True)
def license(self):
    """Prepend a copyright/license header to project sources that lack one.

    Reads the header template named by settings["header"], builds per-extension
    comment blocks, walks the project tree and prepends the header to every
    .c/.cpp/.py file whose first two lines carry neither "Copyright" nor
    "Created by". Finally copies the chosen LICENSE file into the project.
    """
    lic_header = utils.get_content(self.settings["header"])
    author = "Copyright (C) " + str(date.today().year) + " " + self.settings["author"]
    c_style = "/*\n" + author + "\n" + lic_header + "*/\n"
    headers = {
        ".c": c_style,
        ".cpp": c_style,
        # Re-comment every header line for Python; the trailing slice drops
        # the dangling "# " produced by the final newline.
        ".py": "# " + author + "\n# " + lic_header.replace("\n", "\n# ")[:-4] + "\n",
    }
    dir_path = os.path.dirname(os.path.realpath(__file__)) + "/" + self.project
    for path, subdirs, files in os.walk(dir_path):
        for name in files:
            if name in self.settings["ignore"]:
                continue
            extension = os.path.splitext(name)[1]
            if extension not in headers:
                continue
            fpath = path + "/" + name
            # Fix: close the file deterministically (the original left the
            # handle to the garbage collector).
            with open(fpath, "r") as source_file:
                lines = source_file.readlines()
            # Fix: the original raised IndexError on empty files; an empty
            # file has no notice, so it receives the header.
            has_notice = any(
                "Copyright" in line or "Created by" in line for line in lines[:2]
            )
            if not has_notice:
                utils.line_prepender(fpath, headers[extension])
    # Fix: join with a separator — the original concatenated dir_path and
    # "LICENSE" directly, producing e.g. ".../projectLICENSE".
    # NOTE(review): os.system with a settings-derived string; consider
    # shutil.copy if settings values can contain shell metacharacters.
    os.system("cp " + "Licenses/" + self.settings["license"] + " "
              + os.path.join(dir_path, "LICENSE"))
async def autorole_dropdown_set_status(self, ctx: SlashContext, name: str, status: str):
    """Enable or disable a saved autorole dropdown (*status* is "enable"/"disable")."""
    guild_data = await self.bot.mongo.get_guild_data(ctx.guild_id)
    content: dict = get_content("AUTOROLE_DROPDOWN", guild_data.configuration.language)
    autoroles = guild_data.autoroles
    if autoroles is None:
        return await ctx.send(content["NOT_SAVED_DROPDOWNS"])
    # Fix: the original sentinel loop left `autorole` bound to the last entry
    # when no name matched (its `is None` check never fired for a non-empty
    # list); for/else detects the missing autorole reliably.
    for autorole in autoroles:
        if autorole.name == name:
            break
    else:
        return await ctx.send(content["DROPDOWN_NOT_FOUND"])
    message_id = autorole.message_id
    original_message: ComponentMessage = await ctx.channel.fetch_message(
        int(message_id))
    if not original_message.components:
        return await ctx.send(content["MESSAGE_WITHOUT_DROPDOWN_TEXT"], hidden=True)
    select_component: Select = original_message.components[0].components[0]
    if select_component.custom_id != "autorole_select":
        return await ctx.send(content["MESSAGE_WITHOUT_DROPDOWN_TEXT"], hidden=True)
    select_component.disabled = status == "disable"
    # Fix: the original compared the `status` string against a bool
    # (`status == select_component.disabled`), which is always False, so it
    # always reported the dropdown as disabled. Report the actual new state.
    message_content = (content["DROPDOWN_DISABLED_TEXT"]
                       if select_component.disabled
                       else content["DROPDOWN_ENABLED_TEXT"])
    await original_message.edit(components=[select_component])
    await ctx.send(message_content, hidden=True)
async def autorole_create_dropdown(
    self,
    ctx: SlashContext,
    name: str,
    message_content: str,
    placeholder: str = None,
):
    """Post a new (initially disabled, option-less) autorole dropdown and save it."""
    guild_data = await self.bot.mongo.get_guild_data(ctx.guild_id)
    content: dict = get_content("AUTOROLE_DROPDOWN", guild_data.configuration.language)
    if placeholder is None:
        placeholder = content["NO_OPTIONS_TEXT"]
    # A dummy option keeps the select valid; it stays disabled until roles
    # are added.
    dropdown = Select(
        placeholder=placeholder,
        options=[SelectOption(label="None", value="None")],
        disabled=True,
        custom_id="autorole_select",
    )
    message = await ctx.channel.send(content=message_content, components=[dropdown])
    await ctx.send(content["CREATED_DROPDOWN_TEXT"], hidden=True)
    autorole_data = {
        "content": message_content,
        "message_id": message.id,
        "autorole_type": "select_menu",
        "component": dropdown.to_dict(),
    }
    await guild_data.add_autorole(name, autorole_data)
def main():
    """Run the bug-report summarization pipeline end to end.

    Reads the bug reports and gold labels, extracts summary sentence indices
    with TextRank, then prints the averaged evaluation metrics.
    """
    filename = './origin_data/bugreports.xml'
    path = './bug_reports'
    bugslist = utils.read_xml(filename)
    label = utils.read_label('./origin_data/goldset.txt')
    samples, ids = utils.get_content(bugslist)
    num_word_list, numword = utils.count_word(samples)
    utils.savefile(samples)
    results = textrank.bugsum(path, numword, num_word_list)
    # Fix: the original printed the generator object itself
    # (`print(len(i) for i in results)`); materialize the lengths first.
    print([len(i) for i in results])
    pred = eval.index2pred(results, ids)
    y = eval.label2y(label, ids)
    mean_acc, mean_pr, mean_re, mean_f1 = eval.evaluate(y, pred)
    print('mean_acc, mean_pr, mean_re, mean_f1', mean_acc, mean_pr, mean_re, mean_f1)
async def on_note_modal(self, ctx: ModalContext):
    """Handle submission of the "create note" modal and store the note.

    Notes whose custom_id ends with "guild" are stored per-guild; all others
    are stored in the user's global data.
    """
    if not ctx.custom_id.startswith("create_note_modal"):
        return
    guild_data = await self.bot.mongo.get_guild_data(ctx.guild_id)
    content = get_content("NOTES_COMMANDS", guild_data.configuration.language)
    embed = Embed(
        title=content["NOTE_CREATED_TEXT"].format(
            name=ctx.values["note_name"]),
        description=ctx.values["note_content"],
        color=guild_data.configuration.embed_color,
    )
    message = await ctx.send(embed=embed)
    # Fix: take a single timestamp so the formatted time and the raw
    # timestamp cannot disagree (the original called now() twice).
    created_at = datetime.datetime.now()
    data = {
        "name": ctx.values["note_name"],
        "created_at": created_at.strftime("%d.%m.%Y %H:%M"),
        "created_at_timestamp": created_at.timestamp(),
        "jump_url": message.jump_url,
        "content": ctx.values["note_content"],
    }
    if ctx.custom_id.endswith("guild"):
        user_data = await guild_data.get_user(ctx.author_id)
    else:
        global_data = await self.bot.mongo.get_global_data()
        user_data = await global_data.get_user(ctx.author_id)
    await user_data.add_note(data)
async def on_slash_command_error(self, ctx: SlashContext, error):
    """Global slash-command error handler.

    Errors with a known localized description are answered directly; the
    remaining ones are classified via match/case, and anything unrecognized
    is reported generically to the user while the full traceback is
    forwarded to the bot's error channel.
    """
    embed = Embed(color=DiscordColors.RED)
    # Guild-less invocations (DMs) fall back to English.
    lang = await self.bot.get_guild_bot_lang(ctx.guild_id) if ctx.guild is not None else "en-US"
    content = get_content("ERRORS_DESCRIPTIONS", lang)
    desc = self.get_error_description(error, content)
    if desc is not None:
        embed.description = desc
        # Forbidden means we cannot reply in this channel; give up quietly.
        # NOTE(review): if sending raises Forbidden, the `return` is skipped
        # and execution continues into the match below — confirm intended.
        with contextlib.suppress(Forbidden):
            return await ctx.send(embed=embed)
    match error:
        case BotMissingPermissions():
            missing_perms = [transform_permission(perm) for perm in error.missing_perms]
            desc = f'{content["BOT_DONT_HAVE_PERMS"]} `{", ".join(missing_perms)}`'
        case MissingPermissions():
            missing_perms = [transform_permission(perm) for perm in error.missing_perms]
            desc = f'{content["DONT_HAVE_PERMS"]} `{", ".join(missing_perms)}`'
        case CheckFailure():
            desc = content["CHECK_FAILURE"]
        case _:
            # Unknown error: generic user-facing text, full traceback to the
            # maintainers' error channel.
            desc = content["OTHER_ERRORS_DESCRIPTION"].format(error=error)
            embed.title = content["OTHER_ERRORS_TITLE"]
            error_traceback = "".join(format_exception(type(error), error, error.__traceback__))
            error_embed = self.get_error_embed(ctx, error, error_traceback)
            await self.send_error(error_embed)
    embed.description = desc
    with contextlib.suppress(Forbidden):
        await ctx.send(embed=embed)
async def notes_list(self, ctx: SlashContext):
    """List every note (guild-local followed by global) of the invoking user."""
    await ctx.defer()
    global_data = await self.bot.mongo.get_global_data()
    user_global_data = await global_data.get_user(ctx.author_id)
    guild_data = await self.bot.mongo.get_guild_data(ctx.guild_id)
    user_guild_data = await guild_data.get_user(ctx.author_id)
    all_notes = user_guild_data.notes + user_global_data.notes
    if not all_notes:
        raise NoData

    content = get_content("NOTES_COMMANDS", guild_data.configuration.language)
    embed = Embed(
        title=content["USER_NOTE_LIST"].format(ctx.author.display_name),
        description="",
        color=guild_data.configuration.embed_color,
        timestamp=datetime.datetime.utcnow(),
    )
    embed.set_author(name=ctx.author.display_name, icon_url=ctx.author.avatar_url)
    # One field per note: relative creation time, content, jump link.
    for count, note in enumerate(all_notes, start=1):
        note_value = (
            f" ```{note['content']}``` [{content['JUMP_TO']}]({note['jump_url']})"
        )
        embed.add_field(
            name=f"{count}. *(<t:{int(note['created_at_timestamp'])}:R>)*",
            value=note_value,
            inline=False,
        )
    await ctx.send(embed=embed)
async def _send_starboard_message(
    self,
    guild_data: GuildData,
    message: Message,
    stars_count: int,
    starboard_channel: TextChannel,
):
    """Repost *message* into the starboard channel and record the mapping."""
    content = get_content("STARBOARD_FUNCTIONS", guild_data.configuration.language)
    jump_link = f"**[{content['JUMP_TO_ORIGINAL_MESSAGE_TEXT']}]({message.jump_url})**"
    embed = Embed(
        description=f"{message.content}\n\n{jump_link}",
        color=0xEEE2A0,
        timestamp=datetime.datetime.now(),
    )
    embed.set_author(name=message.author, icon_url=message.author.avatar_url)
    # Mirror the first attachment, if any, as the embed image.
    if message.attachments:
        embed.set_image(url=message.attachments[0].url)
    starboard_message = await starboard_channel.send(
        content=f"⭐{stars_count} | {message.channel.mention}", embed=embed
    )
    # Remember original -> starboard message so star updates can be applied.
    await guild_data.starboard.add_starboard_message(
        message.id, starboard_message.id
    )
async def starboard_blacklist_remove(
    self,
    ctx: SlashContext,
    member: str = None,
    role: str = None,
    channel: str = None,
):
    """Remove a member, role and/or channel id from the starboard blacklist.

    The ids arrive as strings from the slash-command options and are cast to
    int before comparison and removal.
    """
    guild_data = await self.bot.mongo.get_guild_data(ctx.guild_id)
    content = get_content("STARBOARD_FUNCTIONS", guild_data.configuration.language)
    if not member and not role and not channel:
        # Fix: this reply was the only non-ephemeral one; keep it hidden like
        # every other response of this command (and of the add counterpart).
        return await ctx.send(content["BLACKLIST_NO_OPTIONS_TEXT"], hidden=True)
    starboard_data = guild_data.starboard
    if starboard_data is None:
        return await ctx.send(content["STARBOARD_NOT_SETUP_TEXT"], hidden=True)
    if not starboard_data.blacklist:
        return await ctx.send(content["EMPTY_BLACKLIST_TEXT"], hidden=True)
    blacklist = starboard_data.blacklist
    # Fix: use .get() so a missing category key cannot raise KeyError —
    # mirrors the defensive lookups in starboard_blacklist_add.
    if member and int(member) in blacklist.get("members", []):
        await starboard_data.remove_member_from_blacklist(int(member))
    if role and int(role) in blacklist.get("roles", []):
        await starboard_data.remove_role_from_blacklist(int(role))
    if channel and int(channel) in blacklist.get("channels", []):
        await starboard_data.remove_channel_from_blacklist(int(channel))
    await ctx.send(content["BLACKLIST_REMOVED_TEXT"], hidden=True)
async def show_user_playlist(self, ctx: SlashContext, playlist: str, hidden: bool = True):
    """Display the tracks of one of the author's saved music playlists."""
    await ctx.defer(hidden=hidden)
    guild_data = await self.bot.mongo.get_guild_data(ctx.guild_id)
    # "...GLOBAL" playlists live in the global collection; others per-guild.
    if playlist.endswith("GLOBAL"):
        data = await self.bot.mongo.get_global_data()
    else:
        data = guild_data
    user_data = await data.get_user(ctx.author_id)
    user_playlists = user_data.music_playlists
    if not user_playlists:
        raise NoData
    playlist_data = user_playlists.get(playlist)
    if not playlist_data:
        raise NoData

    content = get_content(
        "MUSIC_COMMANDS", guild_data.configuration.language)["MUSIC_PLAYLIST"]
    track_lines = "".join(
        f"{position}. `{track}`\n"
        for position, track in enumerate(playlist_data, start=1)
    )
    embed = Embed(
        title=content["PLAYLIST_TITLE_TEXT"].format(playlist=playlist),
        description=track_lines,
        color=guild_data.configuration.embed_color,
    )
    embed.set_author(name=ctx.author.display_name, icon_url=ctx.author.avatar_url)
    await ctx.send(embed=embed, hidden=hidden)
def _init_embeds(self, ctx: SlashContext, guild_data: GuildData, content: dict):
    """Build the help-menu embeds: the main menu plus one page per visible cog."""
    guild_language = guild_data.configuration.language
    # Command descriptions only need translating for non-English guilds.
    translated_commands = (
        get_content("TRANSLATED_COMMANDS", guild_language)
        if guild_language not in ["en-US", "English"]
        else None
    )
    commands_data = self._get_commands_data()
    cog_translations = content["PLUGINS"]
    embeds = [self._get_main_menu(ctx, guild_data, content)]
    for cog_name, cog in self.bot.cogs.items():
        # Skip cogs that should not be shown for this guild.
        if self._cog_check(guild_data, cog, commands_data):
            continue
        embed = Embed(
            title=f"{cog_translations[cog_name.upper()]} | Asteroid Bot",
            description="",
            timestamp=datetime.datetime.utcnow(),
            color=DiscordColors.EMBED_COLOR,
        )
        embed.set_footer(
            text=content["REQUIRED_BY_TEXT"].format(user=ctx.author),
            icon_url=ctx.author.avatar_url,
        )
        embed.set_thumbnail(url=ctx.bot.user.avatar_url)
        embed.custom_id = cog_name
        embeds.append(
            self._fill_embed_page(embed, commands_data[cog], content, translated_commands)
        )
    return embeds
def integration_tests():
    """End-to-end website tests.

    Creates the test site, uploads training data, then runs two tuning
    iterations for each algorithm (DNN, GPR, DDPG) and asserts every result
    is reported as good.
    """
    # Create test website
    response = requests.get(dconf.WEBSITE_URL + '/test/create/')
    LOG.info(get_content(response))

    # Upload training data
    LOG.info('Upload training data to no tuning session')
    upload_batch(result_dir='./integrationTests/data/',
                 upload_code='ottertuneTestNoTuning')

    # wait celery periodic task finishes
    assert wait_pipeline_data_ready(), "Pipeline data failed"

    algorithms = (
        ('DNN', 'deep neural network'),
        ('GPR', 'gaussian process regression'),
        ('DDPG', 'deep deterministic policy gradient'),
    )
    # Two iterations per algorithm; the announcement is logged only once.
    for iteration, prefix in enumerate(('0__', '1__')):
        for short_name, long_name in algorithms:
            if iteration == 0:
                LOG.info('Test %s (%s)', short_name, long_name)
            upload_code = 'ottertuneTestTuning' + short_name
            upload_result(result_dir='./integrationTests/data/', prefix=prefix,
                          upload_code=upload_code)
            response = get_result(upload_code=upload_code)
            assert response['status'] == 'good'

    LOG.info("\n\nIntegration Tests: PASSED!!\n")
async def on_modal(self, ctx: ModalContext):
    """Handle the "new tag" / "edit tag" modal submissions.

    The modal custom_id layout is "<modal id>|<tag kind: embed/text>|<tag name>".
    """
    if not any(_id in ctx.custom_id for _id in ["modal_new_tag", "modal_edit_tag"]):
        return
    # Fix: renamed `type` -> `tag_type` to stop shadowing the builtin.
    custom_id, tag_type, tag_name = ctx.custom_id.split("|")
    guild_data = await self.bot.mongo.get_guild_data(ctx.guild_id)
    content = get_content("TAG_ADD_COMMAND", guild_data.configuration.language)
    if custom_id == "modal_new_tag":
        await self._is_can_manage_tags(ctx)
        await guild_data.add_tag(
            name=tag_name,
            author_id=ctx.author_id,
            description=ctx.values["description"],
            is_embed=tag_type == "embed",
            title=ctx.values["title"] if tag_type == "embed" else "No title",
        )
        await ctx.send(
            content["TAG_CREATED_TEXT"].format(tag_name=tag_name), hidden=True)
    elif custom_id == "modal_edit_tag":
        # Fix: the original sentinel loop left `tag` bound to the LAST tag
        # when no name matched, silently editing the wrong tag; for/else
        # bails out when the tag cannot be found.
        for tag in guild_data.tags:
            if tag.name == tag_name:
                break
        else:
            return
        if tag_type == "embed":
            await tag.set_title(ctx.values["title"])
        await tag.set_description(ctx.values["description"])
        await ctx.send(
            content["TAG_EDITED_TEXT"].format(tag_name=tag_name), hidden=True)
async def music_add_to_playlist(self, ctx: SlashContext, playlist: str,
                                query: str = None, hidden: bool = False):
    """Add a track to one of the author's playlists.

    Without an explicit *query*, the currently playing track is added.
    """
    if not query:
        player: lavalink.DefaultPlayer = self.bot.lavalink.player_manager.get(
            ctx.guild_id)
        if not player:
            raise NotPlaying
        query = player.current.title
    guild_data = await self.bot.mongo.get_guild_data(ctx.guild_id)
    # "...GLOBAL" playlists live in the global collection; others per-guild.
    data = (
        await self.bot.mongo.get_global_data()
        if playlist.endswith("GLOBAL")
        else guild_data
    )
    user_data = await data.get_user(ctx.author_id)
    await user_data.add_track_to_playlist(playlist, query)
    content = get_content(
        "MUSIC_COMMANDS", guild_data.configuration.language)["MUSIC_PLAYLIST"]
    embed = Embed(
        title=content["PLAYLIST_UPDATE_TITLE_TRACK"].format(playlist=playlist),
        description=content["ADDED_TEXT"].format(query=query),
        color=guild_data.configuration.embed_color,
    )
    await ctx.send(embed=embed, hidden=hidden)
async def show_queue_musis(self, ctx: SlashContext):
    """Show the current track and the queued tracks for this guild."""
    await ctx.defer(hidden=True)
    guild_data = await self.bot.mongo.get_guild_data(ctx.guild_id)
    player: lavalink.DefaultPlayer = self.bot.lavalink.player_manager.get(
        ctx.guild_id)
    content: dict = get_content("MUSIC_COMMANDS", guild_data.configuration.language)
    self.__check_music_status(ctx, player)

    if not player.queue:
        return await ctx.send(content["QUEUE_IS_EMPTY_TEXT"])

    queue_lines = "\n".join(
        f"**{position}.** `{track.title}`"
        for position, track in enumerate(player.queue, start=1)
    )
    description = (
        f"**{content['CURRENT_SONG_TEXT']}:** `{player.current.title}`\n"
        + queue_lines
    )
    embed = Embed(
        title=content["CURRENT_QUEUE_TITLE_TEXT"],
        description=description,
        color=guild_data.configuration.embed_color,
    )
    await ctx.send(embed=embed, hidden=True)
def decrypt_dailymotion(video_id):
    """Resolve a direct stream URL for a Dailymotion video.

    Accepts either a bare video id or a full dailymotion.com URL and returns
    the highest-quality stream URL available, or '' on any failure.
    """
    # A full URL was passed; keep only the trailing video id.
    if video_id.find('n.com') != -1:
        video_id = utils.text_finder(video_id, '/', None, True)
    url = "http://www.dailymotion.com/embed/video/" + video_id
    try:
        log.debug('Trying url "%s"' % url)
        cont = utils.get_content(url)
    except BaseException as e:
        log.error('Could not get content')
        log.error(e)
        return ''
    # The player metadata is embedded as "var info = {...};fields = ...".
    json_plain = utils.text_finder(cont, 'var info = ', 'fields = ').strip()[:-1]
    try:
        json_cont = json.loads(json_plain)
    except (ValueError, TypeError) as e:
        log.error('Could not get url content: ' + url)
        log.error(e)
        return ''
    # Fix: idiomatic .get()/is-not-None checks instead of `!= None`.
    if json_cont.get('error') is not None:
        log.error('Video not available.')
        return ''
    # Qualities ordered best-first; return the first one present.
    srcs = ['stream_h264_hd1080_url', 'stream_h264_hd_url', 'stream_h264_hq_url',
            'stream_h264_url', 'stream_h264_ld_url']
    for stype in srcs:
        if json_cont.get(stype) is not None:
            log.debug('Found %s quality url' % stype)
            return str(json_cont[stype])
    return ''
def decrypt_acds(url):
    """Resolve the temporary download link of an Amazon Cloud Drive share.

    Accepts a full share URL or a bare share id; returns '' on any failure.
    """
    # Extract the share id from a full URL if one was given.
    if url.find('http') != -1:
        file_id = utils.text_finder(url, '/', None, True)
    else:
        file_id = url
    api_link = "https://www.amazon.com/drive/v1/shares/%s?customerId=0&ContentType=JSON" % file_id
    try:
        log.debug('Retrieving json content from url "%s"' % api_link)
        json_plain = utils.get_content(api_link)
        if json_plain == '':
            log.error('[decrypt_acds] Could not get JSON content')
            return ''
        json_cont = json.loads(json_plain)
    except BaseException as e:
        log.error('Could not get JSON content')
        log.error('Error: ' + str(e))
        return ''
    # The API reports failures via a top-level "message" field.
    if 'message' in json_cont:
        log.error('Error detected: ' + json_cont['message'])
        return ''
    return json_cont['nodeInfo']['tempLink']
async def music_delete_from_playlist(self, ctx: SlashContext, playlist: str, name: str):
    """Remove a track by name from one of the author's playlists."""
    await ctx.defer(hidden=True)
    guild_data = await self.bot.mongo.get_guild_data(ctx.guild_id)
    # "...GLOBAL" playlists live in the global collection; others per-guild.
    data = (
        await self.bot.mongo.get_global_data()
        if playlist.endswith("GLOBAL")
        else guild_data
    )
    user_data = await data.get_user(ctx.author_id)
    user_playlists = user_data.music_playlists
    if not user_playlists:
        raise NoData
    if not user_playlists.get(playlist):
        raise NoData
    await user_data.remove_track_from_playlist(playlist, name)
    content = get_content(
        "MUSIC_COMMANDS", guild_data.configuration.language)["MUSIC_PLAYLIST"]
    await ctx.send(
        content["MUSIC_DELETED"].format(name=name, playlist=playlist),
        hidden=True,
    )
def ntp_answer(idChat, msg, silent):
    '''Get a solver answer for the user's message and deliver it.

    :param idChat: chat id the reply is sent to
    :param msg: raw user message (cleaned before solving)
    :param silent: when truthy, suppress all outgoing messages
    '''
    msg = clean_msg(msg)
    answer = get_solver(idChat, msg)
    if not silent:
        if answer:
            send_msg(idChat, answer['msg'])
            # Negative chat ids appear to mark the end of the conversation
            # flow; the stored chat state is dropped from redis.
            if answer['chat_id'] == -1:
                globals.redis_db.delete(idChat)
            elif answer['chat_id'] == -2:
                globals.redis_db.delete(idChat)
                # -2 additionally triggers the support-lines follow-up.
                linhas_apoio = get_content("/fs_scrapper/linhas_apoio", [], {})
                if linhas_apoio:
                    # Narrow the lines to the conversation subject if known.
                    if 'assunto' in answer:
                        linhas_apoio = process_linhas_apoio(
                            linhas_apoio, answer['assunto'])
                    pretty_print(idChat, "/fs_scrapper/linhas_apoio",
                                 linhas_apoio, True)
                else:
                    send_msg(idChat, prefab_msgs["failed"][3])
        else:
            # No answer could be produced at all.
            send_msg(idChat, prefab_msgs["failed"][2])
async def info(self, ctx: SlashContext, hoyolab_uid: int = None):
    """Show a Genshin Impact player summary for the given Hoyolab UID.

    Falls back to the author's saved Hoyolab UID when none is provided.
    """
    await ctx.defer()
    if hoyolab_uid is None:
        hoyolab_uid = await self._get_UID(ctx, is_game_uid=False)
    lang = await self.bot.get_guild_bot_lang(ctx.guild_id)
    genshin_lang = self.genshin_langs[lang]
    content = get_content("GENSHIN_INFO_COMMAND", lang)

    # Record card gives the public profile; stats need the in-game UID.
    card = await self.genshin_client.get_record_card(hoyolab_uid, lang=genshin_lang)
    user_data = await self.genshin_client.get_user(int(card.uid), lang=genshin_lang)
    user_stats = user_data.stats

    description = f"""
    **{content['NICKNAME_TEXT']}: {card.nickname}**
    <:adventure_exp:876142502736965672> {content['ADVENTURE_RANK_TEXT']}: `{card.level}`
    <:achievements:871370992839176242> {content['ACHIEVEMENTS_TEXT']}: `{user_stats.achievements}`
    :mage: {content['CHARACTERS_TEXT']}: `{user_stats.characters}`
    <:spiral_abyss:871370970600968233> {content['SPIRAL_ABYSS_TEXT']}: `{user_stats.spiral_abyss}`
    """
    embed = Embed(
        title=content["PLAYER_INFO_TEXT"],
        description=description,
        color=await self.bot.get_embed_color(ctx.guild.id),
    )
    embed.set_footer(text=f"Hoyolab UID: {hoyolab_uid} | Game UID: {card.uid}")
    await ctx.send(embed=embed)
def __init__(self, source_path, translated_files_path, result_folder_path, named_html_entities):
    """Prepare a source/translated file pair for placeholder comparison.

    Args:
        source_path: path of the original (source-language) file.
        translated_files_path: root directory holding the translated files.
        result_folder_path: directory the (possibly fixed) result is written to.
        named_html_entities: entity-handling setting passed through to content
            extraction downstream.
    """
    self.named_html_entities = named_html_entities
    self.source_path = source_path
    self.source_name = self.source_path.name
    # File suffix selects the content-extraction strategy in utils.get_content.
    self.type = self.source_path.suffix
    self.translated_files_path = translated_files_path
    self.target_path = self.get_target_path()
    self.result_path = result_folder_path / self.source_name
    self.source_content = utils.get_content(self.source_path, self.type)
    self.target_content = utils.get_content(self.target_path, self.type)
    # The result starts as a copy of the target content.
    self.result_content = self.target_content
    self.source_placeholders = utils.get_placeholders(self.source_content)
    self.target_placeholders = utils.get_placeholders(self.target_content)
def get(self):
    """HTTP GET: extract a WeChat public-account record from an article URL.

    Query param `url`: the article address. Responds with JSON
    {'status': 0|1, 'message': ...}; status 0 only when the public account
    was parsed and stored successfully.
    """
    try:
        article_url = self.get_argument('url', '')
        weblog.info('article url %s ' % article_url)
        if not article_url:
            return self.write({'status': 1, 'message': 'no url params'})
        # Download the article and parse the public-account fields from it.
        content = utils.get_content(article_url)
        article = extractor.parse_pubnum_from_article(content)
        weblog.info('extrat article info %s ' % article)
        # 'biz' and 'originid' are the minimum fields needed to store it.
        if not article or 'biz' not in article or 'originid' not in article:
            return self.write({
                'status': 1,
                'message': 'parse pubnum error!'
            })
        # Skip accounts that are already stored (keyed by originid).
        pubnum = dao.get_pubnum_by_originid(mydqldb, article['originid'])
        if pubnum:
            return self.write({'status': 1, 'message': 'pubnum exists!'})
        status = dao.save_wecaht_pubnum(mydqldb, article)
        weblog.info('save wecaht pubnum status: %s' % status)
        if status is False:
            return self.write({
                'status': 1,
                'message': 'write pubnum error!!'
            })
        self.write({'status': 0, 'message': 'success!'})
    except Exception as e:
        # Any unexpected failure is reported to the client with the traceback.
        self.write({'status': 1, 'message': traceback.format_exc()})
        weblog.error(traceback.format_exc())
def from_dict(cls, d):
    """ Override default, adding the capture of members.

    Builds the DistributionList via the parent implementation, then extracts
    the member addresses from the 'dlm' entry when one is present.
    """
    o = super(DistributionList, cls).from_dict(d)
    o.members = []
    # Fix: dict.has_key() was removed in Python 3; `in` works in both 2 and 3.
    if 'dlm' in d:
        o.members = [utils.get_content(member)
                     for member in utils.as_list(d["dlm"])]
    return o
def __init__(self, data_file, sub_table = 0):
    """Load raffle data (preferring a fresh pickle cache) and build statistics.

    Args:
        data_file: path of the raw results table to parse.
        sub_table: when > 0, keep only the first `sub_table` raffles.
    """
    if os.path.exists(self.pickle_file):
        # Reuse the cached pickle only when it is newer than the data file.
        if os.path.getmtime(self.pickle_file) > os.path.getmtime(data_file):
            try:
                with open(self.pickle_file, 'rb') as data_bin:
                    self.all_content = pickle.load(data_bin)
                    if sub_table > 0:
                        self.all_content = self.all_content[:sub_table]
            except IOError as err:
                print ("File error: " + str(err))
        else:
            # Stale cache: re-parse the raw file and refresh the pickle.
            p = ParsePage(self.doz_by_raffle)
            p.feed(utils.get_content(data_file))
            self.all_content = p.get_full_data()
            if sub_table > 0:
                self.all_content = self.all_content[:sub_table]
            try:
                with open(self.pickle_file, 'wb') as data_bin:
                    pickle.dump(self.all_content, data_bin)
            except IOError as err:
                print ("File error: " + str(err))
    # NOTE(review): when the pickle file does not exist at all,
    # self.all_content is never assigned here — confirm a caller or class
    # attribute covers that first-run case.
    self.all_stat = []
    self.even_odd = {}
    self.doze = {}
    # Even/odd split buckets e<i>xo<j> with i + j == doz_by_raffle.
    self.even_odd = {'e' + str(i) + 'xo' + str(j): [] for i in range(0, self.doz_by_raffle + 1)
                     for j in reversed(range(0, self.doz_by_raffle + 1)) if (i+j) == self.doz_by_raffle}
    # Buckets by tens digit ('0x', '1x', ...) and by unit digit ('x0'..'x9').
    self.doze = {str(i) + 'x': [] for i in range(0, self.dozen_dozens + 1)}
    self.unit = {'x' + str(i): [] for i in range(0, 10)}
    # Populate every statistics table from the loaded raffle data.
    self.init_stat_table()
    self.build_occur_list()
    self.build_delay_list()
    self.build_freq_dict()
    self.more_often_num()
    self.last_time()
    self.most_delay()
    self.aver_delay()
    self.fill_up_stand_dev()
    self.fill_up_stand_sco()
    self.rule_even_by_odd()
    self.more_often_dozen()
    self.more_often_unit()
def __init__(self,link):
    # Fetch and parse a web page: title, content, referenced links, keywords.
    # self.status stays False unless every field parses successfully.
    self.link = link
    self.title = ""
    self.time = 0
    self.content = ""
    self.keywords = ""
    self.refer = []
    self.status = False  # whether parsing succeeded
    # Reject links without an http:// scheme.
    if link.find('http://') == -1:
        return # invalid link
    base_url = 'http://' + link.split('/')[2]
    self.pq = ""
    try:
        # pyquery fetch + absolute-link rewrite; may fail on network errors.
        self.pq = pq(url=link).make_links_absolute()
    except Exception as err:
        print "failed to open this link " + link
    if self.pq == "":
        return
    # get title
    self.title = get_title(self.pq)
    self.time = time.time()
    self.content = get_content(self.pq)
    self.refer = get_refer(self.pq)
    if len(self.title) == 0 or \
        len(self.content) == 0 or len(self.refer) == 0:
        # Parsing failed: remember this site as unparseable in the DB so
        # later runs can skip it.
        print "can not parse " + link
        mpage = UnparsePage_m()
        mpage.url = base_url.decode('utf-8')
        mpage.save()
        self.keywords = ''
        return
    else:
        # get keywords — extracted from the title for search indexing.
        self.keywords = jieba.cut_for_search(self.title)
        self.status = True
def decrypt_vkontakte(oid, id_=None, hash_=None):
    """Resolve a direct video URL from VK.

    Either pass a full embed URL as *oid*, or the oid/id/hash triple used by
    VK's video_ext.php embed. Returns the best-quality URL found, or ''.
    """
    if id_ is None and hash_ is None:
        # A complete URL was passed in `oid`.
        url = oid
    else:
        url = "http://vk.com/video_ext.php?oid=%s&id=%s&hash=%s" % (oid, id_, hash_)
    download_link = ''
    try:
        log.debug('Trying url "%s"' % url)
        cont = utils.get_content(url)
    except BaseException as e:
        log.error('Could not get content')
        log.error(e)
        return ''
    # The player variables are embedded as "var vars = {...} var fixed_...".
    json_plain = utils.text_finder(cont, 'var vars = ', 'var fixed_').strip()
    if json_plain == '':
        print('Could not get link: empty JSON')
        return ''
    try:
        json_cont = json.loads(json_plain)
    except ValueError as e:
        log.error('Could not get link for url "%s"' % url)
        # VK may reply with: "Embedding video of the given user or the group
        # on external sites is prohibited due to copyrighting infringement or
        # pornographic content."
        # Fix: str.find() returns -1 (truthy!) when absent, so the original
        # bare `if cont.find(...)` was nearly always true; compare to -1.
        if cont.find('external sites is prohibited') != -1:
            log.error('Video uploader has linking from external sites prohibited')
        return ''
    # Qualities ordered best-first; take the first one present.
    for q in ['1080', '720', '480', '360', '240']:
        if 'url' + q in json_cont and download_link == '':
            log.debug('Found %sp quality url' % q)
            download_link = json_cont['url' + q]
            break
    return download_link.replace('\n', '')
# NOTE(review): this chunk is the body of a loader similar to the pickle-caching
# __init__ above; the enclosing `def` lies outside this view (Python 2 syntax).
if os.path.exists('data/facil.pickle'):
    self.updated = os.path.getmtime(data_file)
    # Reuse the cached pickle only when it is newer than the data file.
    if os.path.getmtime('data/facil.pickle') > os.path.getmtime(data_file):
        try:
            # NOTE(review): existence is checked on 'data/...' but the file is
            # opened from '../data/...' — confirm which path is intended.
            data_bin = open('../data/facil.pickle', 'rb')
            self.all_content = pickle.load(data_bin)
            if sub_table > 0:
                self.all_content = self.all_content[:sub_table]
        except IOError, err:
            print ("File error: " + str(err))
        else:
            data_bin.close()
    else:
        # Stale cache: re-parse the raw data file and refresh the pickle.
        p = ParsePage(self.doz_by_raffle)
        p.feed(utils.get_content(data_file))
        self.all_content = p.get_full_data()
        if sub_table > 0:
            self.all_content = self.all_content[:sub_table]
        try:
            data_bin = open('../data/facil.pickle', 'wb')
            pickle.dump(self.all_content, data_bin)
        except IOError, err:
            print ("File error: " + str(err))
        else:
            data_bin.close()
self.init_stat_table()
# Even/odd split buckets (15 dozens per raffle) and tens-digit buckets.
self.even_odd = {"e0xo15": [], "e1xo14": [], "e2xo13": [], "e3xo12": [],
                 "e4xo11": [], "e5xo10": [], "e6xo9": [], "e7xo8": [],
                 "e8xo7": [], "e9xo6": [], "e10xo5": [], "e11xo4": [],
                 "e12xo3": [], "e13xo2": [], "e14xo1": [], "e15xo0": []}
self.doze = {"0x": [], "1x": [], "2x": []}
def decrypt_videobam(url):
    """Resolve the direct download link from a VideoBam page.

    Fetches the page, pulls the escaped URL out of the embedded JSON and
    unescapes it. Returns '' when no link is present.
    """
    cont = utils.get_content(url)
    # The link is stored JSON-escaped ("...\\/...") and percent-encoded;
    # (removed the dead `download_link = ''` initialization).
    return unquote(utils.text_finder(cont, '","url":"', '"')).replace('\\/', '/')
def _post(ES_URL, query):
    """POST *query* to Elasticsearch and return the parsed response content."""
    response = requests.post(ES_URL, data=query)
    return get_content(response.json())
# get or create a source_doc record for the co-sponsor page source_doc = get_or_create_source_doc( file_name=the_file, name="{0.bill_type.id} {0.number} co-sponsors".format(bill), session=bill.session, url=bill.co_sponsor_link, parent=bill.source_doc, chamber="S", ) content = None # load the content from the co-sponsor page while content == None: try: content = get_content(source_doc, requests_session) except requests.exceptions.ConnectionError as e: print e print " Connection failed. Retrying..." requests_session = requests.session() except Exception as e: print e # loop over the links extracted from the co-sponsor page for link in extract_links(content, bill.co_sponsor_link): if "District" in link["name"]: # parse the name out of the link text parse_link_name = parse_name(link["name"]) if parse_link_name["success"]:
def handle_request(response):
    """Async-HTTP fetch callback.

    Appends the parsed response body to the shared `toresult` list,
    decrements the shared outstanding-request counter `i`, and stops the
    Tornado IOLoop once the final request has completed.
    """
    global i, toresult
    toresult.append(get_content(json.loads(response.body)))
    i -= 1
    if i == 0:
        ioloop.IOLoop.instance().stop()