async def stop(self, ctx):
    """Stop the currently playing track; only honored in the command channel."""
    guild = self.bot.get_guild(id=int(get_section("bot").get("guild")))
    commandchannel = guild.get_channel(int(get_section("music").get("commandChannel")))
    if ctx.channel == commandchannel:
        voice = get(self.bot.voice_clients, guild=ctx.guild)
        # Robustness: get() returns None when the bot has no voice client in
        # this guild; the original crashed with AttributeError here.
        if voice is not None and voice.is_playing() and not voice.is_paused():
            voice.stop()
            await ctx.send("Stopping Music")
def javac_flags(self):
    """Return the javac flags: debug-info options followed by warning flags."""
    java_config = config.get_section('java_config')
    level = config.get_section('global_config')['debug_info_level']
    debug_options = java_config['debug_info_levels'][level]
    # Per-target warnings take precedence over the global java_config default.
    warnings = self.data.get('warnings') or java_config['warnings']
    return debug_options + warnings
async def on_raw_reaction_remove(self, payload):
    """Re-grant the rank role mapped to the emoji whose reaction was removed."""
    if payload.channel_id != int(get_section("bot").get("rank_channel_id")):
        return
    member = utils.get(Client.get_all_members(self.bot), id=payload.user_id)
    # Config maps emoji name -> role id (stored as a string in the config).
    role_id = int(get_section("bot").get("ranks").get(payload.emoji.name))
    await member.add_roles(utils.get(member.guild.roles, id=role_id))
async def on_message(self, message):
    """Auto-pin marketplace messages posted in the configured pinned channel."""
    guild = self.bot.get_guild(id=int(get_section("bot").get("guild")))
    pinned_channel = guild.get_channel(
        int(get_section("api").get("pinnedChannel")))
    if message.channel != pinned_channel:
        return
    markers = (':BUYING:', ':SELLING:', ':WTB:', ':WTS:', ':TRADING:', ':WTT:')
    content = message.content.upper()
    if any(marker in content for marker in markers):
        await message.pin()
async def volume(self, ctx, volume: int):
    """Set playback volume as a percentage (20-200); command channel only."""
    guild = self.bot.get_guild(id=int(get_section("bot").get("guild")))
    commandchannel = guild.get_channel(int(get_section("music").get("commandChannel")))
    voice = get(self.bot.voice_clients, guild=ctx.guild)
    if ctx.channel == commandchannel and not voice.is_paused():
        # Bug fix: the original used range(19, 201), which accepted 19 even
        # though the error message (and intent) is 20-200 inclusive.
        if not 20 <= volume <= 200:
            await ctx.send("Volume has to be between 20 and 200")
        else:
            voice.source = discord.PCMVolumeTransformer(voice.source)
            voice.source.volume = float(volume / 100)
            await ctx.send("Volume set to " + str(volume))
async def on_ready(self):
    """Join (or move to) the configured music voice channel on startup."""
    try:
        guild = self.bot.get_guild(id=int(get_section("bot").get("guild")))
        target = guild.get_channel(int(get_section("music").get("channel")))
        client = get(self.bot.voice_clients, guild=guild)
        if client and client.is_connected():
            # Already connected somewhere in this guild: just move.
            await client.move_to(target)
        else:
            client = await target.connect()
    except Exception as error:
        # Log-and-continue: startup must not crash the whole bot.
        print(error)
def __init__(self, bot):
    """Store the bot handle and validate Custom Search Engine credentials."""
    self.bot = bot
    # Rolling history of the ten most recent image results.
    self.last_images = deque(maxlen=10)
    self.cse_api_key = get_section("show_image").get("cse_api_key")
    self.cse_cx = get_section("show_image").get("cse_cx")
    # Fail fast at load time when either credential is missing.
    if not self.cse_api_key:
        raise Exception("Key 'cse_api_key' not found or not set.")
    if not self.cse_cx:
        raise Exception("Key 'cse_cx' not found or not set.")
async def pause(self, ctx):
    """Toggle pause/resume of the current track; command channel only."""
    guild = self.bot.get_guild(id=int(get_section("bot").get("guild")))
    commandchannel = guild.get_channel(int(get_section("music").get("commandChannel")))
    if ctx.channel == commandchannel:
        voice = get(self.bot.voice_clients, guild=ctx.guild)
        # Robustness: get() returns None when the bot has no voice client in
        # this guild; the original crashed with AttributeError here.
        if voice is None:
            return
        if voice.is_playing() and not voice.is_paused():
            voice.pause()
            await ctx.send("Pausing music, run pause command again to resume")
        elif voice.is_paused():
            voice.resume()
            await ctx.send("Resuming music")
async def getPins(self, interval):
    """Periodically export this channel's pinned messages to market.json in a
    GitHub repo, sleeping between runs per the cron-style *interval* string.
    """
    print("getPins Running")
    guild = self.bot.get_guild(id=int(get_section("bot").get("guild")))
    channel = guild.get_channel(
        int(get_section("api").get("pinnedChannel")))
    await self.bot.wait_until_ready()
    cron = CronTab(interval)  # parsed cron schedule used for the sleep below
    repo = Github(get_section("api").get("gitKey")).get_repo(
        get_section("api").get("gitRepo"))
    obj = []  # accumulated pin records for one pass
    eurl = ""  # embed URLs; "" when the message has none
    aurl = ""  # attachment URLs; "" when the message has none
    while True:
        async for msg in channel.history(limit=5000):
            if (msg.pinned):
                if msg.embeds:
                    eurl = []
                    eurl.clear()
                    for embed in msg.embeds:
                        eurl.append(str(embed.url))
                if msg.attachments:
                    aurl = []
                    for attachment in msg.attachments:
                        aurl.append(str(attachment.url))
                # NOTE(review): "embeds" is serialized via str() while
                # "attachments" stores the raw list - confirm the asymmetry
                # is expected by consumers of market.json.
                tmp = {
                    "user": str(msg.author),
                    "message": str(msg.content),
                    "created": str(msg.created_at),
                    "avatar_url": str(msg.author.avatar_url),
                    "message_id": str(msg.id),
                    "embeds": str(eurl),
                    "attachments": aurl
                }
                obj.append(tmp)
                # Reset per-message accumulators so the next pin starts clean.
                eurl = ""
                aurl = ""
        # Compare the freshly-encoded payload with the file currently in the
        # repo to avoid committing when nothing changed.
        file = repo.get_contents("market.json")
        encode = base64.b64encode(str(json.dumps(obj)).encode("utf-8"))
        if str(file.content).replace('\n', '') == str(encode, "utf-8"):
            print("Data is unchanged, no commit was made")
            await asyncio.sleep(cron.next(default_utc=True))
            obj.clear()
            tmp.clear()
        else:
            repo.update_file("market.json", str(datetime.datetime.now()),
                             json.dumps(obj), file.sha)
            print("Updated market.json")
            await asyncio.sleep(cron.next(default_utc=True))
            obj.clear()
            tmp.clear()
def get_flags_except_warning(self):
    """Get the flags that are not warning flags.

    Returns:
        (flags_except_warning, linkflags): compile flags (minus warnings) and
        link flags, both after filtering out flags the compiler rejects.
    """
    global_config = config.get_section('global_config')
    cc_config = config.get_section('cc_config')
    if not self.options.m:
        flags_except_warning = []
        linkflags = []
    else:
        # Target machine word size, e.g. -m32/-m64.
        flags_except_warning = ['-m%s' % self.options.m]
        linkflags = ['-m%s' % self.options.m]
    flags_except_warning.append('-pipe')

    # Debugging information setting
    debug_info_level = global_config['debug_info_level']
    debug_info_options = cc_config['debug_info_levels'][debug_info_level]
    flags_except_warning += debug_info_options

    # Option debugging flags
    if self.options.profile == 'debug':
        flags_except_warning.append('-fstack-protector')
    elif self.options.profile == 'release':
        flags_except_warning.append('-DNDEBUG')
    flags_except_warning += [
        '-D_FILE_OFFSET_BITS=64',
        '-D__STDC_CONSTANT_MACROS',
        '-D__STDC_FORMAT_MACROS',
        '-D__STDC_LIMIT_MACROS',
    ]

    if getattr(self.options, 'gprof', False):
        flags_except_warning.append('-pg')
        linkflags.append('-pg')

    if getattr(self.options, 'coverage', False):
        # Bug fix: the original compared version strings lexically
        # (self.gcc_version > '4.1'), which misclassifies gcc >= 10
        # ('10...' < '4.1' lexically). Compare numeric components instead.
        def _version_key(version):
            parts = []
            for piece in version.split('.'):
                digits = ''.join(ch for ch in piece if ch.isdigit())
                parts.append(int(digits) if digits else 0)
            return tuple(parts[:3])

        if _version_key(self.gcc_version) > (4, 1):
            flags_except_warning.append('--coverage')
            linkflags.append('--coverage')
        else:
            # Legacy coverage flags for very old gcc (<= 4.1).
            flags_except_warning.append('-fprofile-arcs')
            flags_except_warning.append('-ftest-coverage')
            linkflags += [
                '-Wl,--whole-archive', '-lgcov', '-Wl,--no-whole-archive'
            ]

    flags_except_warning = self._filter_out_invalid_flags(
        flags_except_warning)
    return (flags_except_warning, linkflags)
def get_flags_except_warning(self):
    """Get the flags that are not warning flags.

    Returns:
        (flags_except_warning, linkflags): compile flags (minus warnings) and
        link flags, both after filtering out flags the compiler rejects.
    """
    global_config = config.get_section('global_config')
    cc_config = config.get_section('cc_config')
    if not self.options.m:
        flags_except_warning = []
        linkflags = []
    else:
        # Target machine word size, e.g. -m32/-m64.
        flags_except_warning = ['-m%s' % self.options.m]
        linkflags = ['-m%s' % self.options.m]
    flags_except_warning.append('-pipe')

    # Debugging information setting
    debug_info_level = global_config['debug_info_level']
    debug_info_options = cc_config['debug_info_levels'][debug_info_level]
    flags_except_warning += debug_info_options

    # Option debugging flags
    if self.options.profile == 'debug':
        flags_except_warning.append('-fstack-protector')
    elif self.options.profile == 'release':
        flags_except_warning.append('-DNDEBUG')
    flags_except_warning += [
        '-D_FILE_OFFSET_BITS=64',
        '-D__STDC_CONSTANT_MACROS',
        '-D__STDC_FORMAT_MACROS',
        '-D__STDC_LIMIT_MACROS',
    ]

    if getattr(self.options, 'gprof', False):
        flags_except_warning.append('-pg')
        linkflags.append('-pg')

    if getattr(self.options, 'coverage', False):
        # Bug fix: the original compared version strings lexically
        # (self.gcc_version > '4.1'), which misclassifies gcc >= 10
        # ('10...' < '4.1' lexically). Compare numeric components instead.
        def _version_key(version):
            parts = []
            for piece in version.split('.'):
                digits = ''.join(ch for ch in piece if ch.isdigit())
                parts.append(int(digits) if digits else 0)
            return tuple(parts[:3])

        if _version_key(self.gcc_version) > (4, 1):
            flags_except_warning.append('--coverage')
            linkflags.append('--coverage')
        else:
            # Legacy coverage flags for very old gcc (<= 4.1).
            flags_except_warning.append('-fprofile-arcs')
            flags_except_warning.append('-ftest-coverage')
            linkflags += ['-Wl,--whole-archive', '-lgcov',
                          '-Wl,--no-whole-archive']

    flags_except_warning = self._filter_out_invalid_flags(
        flags_except_warning)
    return (flags_except_warning, linkflags)
def run_migrations_online():
    """Run migrations in 'online' mode.

    In this scenario we need to create an Engine and associate a connection
    with the context. The database URL is taken from application settings
    rather than the alembic ini file.
    """
    core_configs = config.get_section(config.config_ini_section)
    core_configs['sqlalchemy.url'] = settings.SQLALCHEMY_DATABASE_URI
    engine = engine_from_config(core_configs,
                                prefix='sqlalchemy.',
                                poolclass=pool.NullPool)
    connection = engine.connect()
    try:
        # Fix: configure() was outside the try block, so a failure there
        # leaked the connection. Everything after connect() is now guarded.
        context.configure(connection=connection,
                          target_metadata=target_metadata)
        with context.begin_transaction():
            context.run_migrations()
    finally:
        connection.close()
async def addreact(self, ctx, messageid: int):
    """Adds base reactions to specified message (Admin only)"""
    if messageid is None:
        return
    msg = await ctx.fetch_message(id=messageid)
    # Config "ranks" maps emoji identifiers to role ids; react with each key.
    for emoji_id in get_section("bot").get("ranks"):
        await msg.add_reaction(emoji=str(emoji_id))
def _generate_proto_builders(self):
    """Emit scons rules that set up the protobuf builders (C++/Java/PHP/Go)."""
    self._add_rule('time_value = Value("%s")' % time.asctime())
    proto_config = config.get_section('proto_library_config')
    protoc_bin = proto_config['protoc']
    # Fall back to the generic protoc when no java-specific binary is set.
    protoc_java_bin = proto_config['protoc_java'] or protoc_bin
    protobuf_path = proto_config['protobuf_path']
    protobuf_incs_str = protoc_import_path_option(
        proto_config['protobuf_incs'])
    if proto_config['protobuf_java_incs']:
        protobuf_java_incs = protoc_import_path_option(
            proto_config['protobuf_java_incs'])
    else:
        protobuf_java_incs = protobuf_incs_str
    protobuf_php_path = proto_config['protobuf_php_path']
    protoc_php_plugin = proto_config['protoc_php_plugin']
    protoc_go_plugin = proto_config['protoc_go_plugin']
    self._add_rule(
        'scons_helper.setup_proto_builders(top_env, "%s", protoc_bin="%s", '
        'protoc_java_bin="%s", protobuf_path="%s", '
        'protobuf_incs_str="%s", protobuf_java_incs="%s", '
        'protobuf_php_path="%s", protoc_php_plugin="%s", '
        'protoc_go_plugin="%s")' % (
            self.build_dir, protoc_bin, protoc_java_bin, protobuf_path,
            protobuf_incs_str, protobuf_java_incs, protobuf_php_path,
            protoc_php_plugin, protoc_go_plugin))
class Pinmanager(commands.Cog):
    """Admin commands for cleaning up old pinned messages."""

    least_role_needed = get_section("bot").get("admin_minimum_role")

    def __init__(self, bot):
        self.bot = bot

    def has_at_least_role(name):
        """Command check: author must hold a role >= the named role; no DMs."""
        def predicate(ctx):
            msg = ctx.message
            ch = msg.channel
            if type(ch) == DMChannel:
                return False
            role = discord.utils.get(ctx.guild.roles, name=name)
            return any(x >= role for x in msg.author.roles)
        return commands.check(predicate)

    @commands.command(aliases=["cpin", "clean"])
    @has_at_least_role(least_role_needed)
    async def clearPins(self, ctx):
        """Unpin messages in this channel that were created > 30 days ago."""
        removedPins = 0
        await ctx.send("Attempting to remove old pins, this may take a while")
        async for msg in ctx.message.channel.history(limit=5000):
            # Fix: the original used bitwise '&' on booleans; 'and' is the
            # correct logical operator and short-circuits.
            # NOTE(review): datetime.now() is local time while msg.created_at
            # is UTC in discord.py - the age may be off by the UTC offset;
            # confirm whether utcnow() was intended.
            if msg.pinned and (datetime.datetime.now() - msg.created_at).days > 30:
                await msg.unpin()
                removedPins = removedPins + 1
        await ctx.send("Removed " + str(removedPins) + " old pins!")
async def removerole(self, ctx, member: discord.Member, *, rolename: discord.Role):
    """Remove *rolename* from *member* if the invoking user holds a managed
    section role that is permitted to manage that role.

    Assumes get_section("managedroles") maps a section-role id to the
    collection of role ids (as strings) it may manage - TODO confirm.
    """
    await ctx.channel.trigger_typing()
    for x in get_section("managedroles"):
        # Find the author's role matching this managed-section key.
        # NOTE(review): compares r.id (int) with config key x - if the config
        # keys are strings this never matches; verify the key types.
        sectionRole = find(lambda r: r.id == x, ctx.message.author.roles)
        # find() returns None on no match; None is never in author.roles, so
        # the loop simply moves on to the next section key.
        if sectionRole in ctx.message.author.roles:
            if str(rolename.id) in get_section("managedroles").get(sectionRole.id):
                if rolename in member.roles:
                    await member.remove_roles(rolename)
                    await ctx.send("Removed " + member.display_name + " from " + rolename.name)
                    break
                else:
                    await ctx.send("⚠ " + member.display_name + " does not have " + rolename.name)
                    break
            else:
                # First matching section role decides; loop always stops here.
                await ctx.send("⚠ You're not allowed to remove " + rolename.name + " from other users")
                break
async def addrole(self, ctx, member: discord.Member, *, rolename: discord.Role):
    """Grant *rolename* to *member* if the invoking user holds a managed
    section role that is permitted to manage that role.

    Assumes get_section("managedroles") maps a section-role id to the
    collection of role ids (as strings) it may manage - TODO confirm.
    """
    await ctx.channel.trigger_typing()
    for x in get_section("managedroles"):
        # Find the author's role matching this managed-section key.
        # NOTE(review): compares r.id (int) with config key x - if the config
        # keys are strings this never matches; verify the key types.
        sectionRole = find(lambda r: r.id == x, ctx.message.author.roles)
        # find() returns None on no match; None is never in author.roles, so
        # the loop simply moves on to the next section key.
        if sectionRole in ctx.message.author.roles:
            if str(rolename.id) in get_section("managedroles").get(sectionRole.id):
                if rolename in member.roles:
                    await ctx.send("⚠ " + member.display_name + " already has " + rolename.name)
                    break
                else:
                    # Announces success before the API call that applies it.
                    await ctx.send("Added " + member.display_name + " to " + rolename.name)
                    await member.add_roles(rolename)
                    break
            else:
                # First matching section role decides; loop always stops here.
                await ctx.send("⚠ You're not allowed to give other users " + rolename.name)
                break
def generate_proto_rules(self):
    """Emit ninja rules for protobuf code generation: C++, Java, Python,
    descriptor sets, and (when a go plugin is configured) Go.
    """
    proto_config = config.get_section('proto_library_config')
    protoc = proto_config['protoc']
    protoc_java = protoc
    if proto_config['protoc_java']:
        protoc_java = proto_config['protoc_java']
    protobuf_incs = protoc_import_path_option(
        proto_config['protobuf_incs'])
    protobuf_java_incs = protobuf_incs
    if proto_config['protobuf_java_incs']:
        protobuf_java_incs = protoc_import_path_option(
            proto_config['protobuf_java_incs'])
    # Declare the (initially empty) flag variables the rules reference.
    self._add_rule('''
protocflags =
protoccpppluginflags =
protocjavapluginflags =
protocpythonpluginflags =
''')
    self.generate_rule(
        name='proto',
        command='%s --proto_path=. %s -I=`dirname ${in}` '
                '--cpp_out=%s ${protocflags} ${protoccpppluginflags} ${in}' % (
                    protoc, protobuf_incs, self.build_dir),
        description='PROTOC ${in}')
    self.generate_rule(
        name='protojava',
        command='%s --proto_path=. %s --java_out=%s/`dirname ${in}` '
                '${protocjavapluginflags} ${in}' % (
                    protoc_java, protobuf_java_incs, self.build_dir),
        description='PROTOCJAVA ${in}')
    self.generate_rule(
        name='protopython',
        command='%s --proto_path=. %s -I=`dirname ${in}` '
                '--python_out=%s ${protocpythonpluginflags} ${in}' % (
                    protoc, protobuf_incs, self.build_dir),
        description='PROTOCPYTHON ${in}')
    self.generate_rule(
        name='protodescriptors',
        command='%s --proto_path=. %s -I=`dirname ${first}` '
                '--descriptor_set_out=${out} --include_imports '
                '--include_source_info ${in}' % (protoc, protobuf_incs),
        description='PROTODESCRIPTORS ${in}')
    protoc_go_plugin = proto_config['protoc_go_plugin']
    if protoc_go_plugin:
        go_home = config.get_item('go_config', 'go_home')
        if not go_home:
            console.error_exit(
                'go_home is not configured in either BLADE_ROOT or '
                'BLADE_ROOT.local.')
        outdir = os.path.join(go_home, 'src')
        subplugins = proto_config['protoc_go_subplugins']
        if subplugins:
            go_out = 'plugins=%s:%s' % ('+'.join(subplugins), outdir)
        else:
            go_out = outdir
        # Fix: this command string was split mid-literal in the original
        # source (a syntax error); the two fragments are rejoined here.
        self.generate_rule(
            name='protogo',
            command='%s --proto_path=. %s -I=`dirname ${in}` '
                    '--plugin=protoc-gen-go=%s --go_out=%s ${in}' % (
                        protoc, protobuf_incs, protoc_go_plugin, go_out),
            description='PROTOCGOLANG ${in}')
def __init__(self, name, srcs, deps, optimize, deprecated, blade, kwargs):
    """Init method. Init the fbthrift target.

    Registers hardcoded deps on the fbthrift runtime libraries and creates a
    FBThriftHelper per source file to enumerate its generated sources.
    """
    srcs = var_to_list(srcs)
    self._check_thrift_srcs_name(srcs)
    CcTarget.__init__(self, name, 'fbthrift_library', srcs, deps, None, '',
                      [], [], [], optimize, [], [], blade, kwargs)
    fbthrift_config = config.get_section('fbthrift_config')
    fbthrift_libs = var_to_list(fbthrift_config['fbthrift_libs'])
    # Removed unused locals fbthrift1_bin/fbthrift2_bin: they were read from
    # config here but never used in this constructor.

    # Hardcode deps rule to thrift libraries.
    self._add_hardcode_library(fbthrift_libs)

    # Link all the symbols by default
    self.data['link_all_symbols'] = True

    # For each thrift file initialize a FBThriftHelper, which will be used
    # to get the source files generated from thrift file.
    self.fbthrift_helpers = {}
    for src in srcs:
        self.fbthrift_helpers[src] = FBThriftHelper(
            os.path.join(self.path, src))
def _generate_java_coverage_report(self):
    """Run the jacoco-based report generator over collected coverage data."""
    java_test_config = config.get_section('java_test_config')
    jacoco_home = java_test_config['jacoco_home']
    coverage_reporter = java_test_config['coverage_reporter']
    if not jacoco_home or not coverage_reporter:
        console.warning('Missing jacoco home or coverage report generator '
                        'in global configuration. '
                        'Abort java coverage report generation.')
        return
    jacoco_libs = os.path.join(jacoco_home, 'lib', 'jacocoant.jar')
    report_dir = os.path.join(self.build_dir, 'java', 'coverage_report')
    if not os.path.exists(report_dir):
        os.makedirs(report_dir)
    coverage_data = self._get_java_coverage_data()
    if not coverage_data:
        return
    # Assemble the shell command: generator invocation, output dir, then one
    # comma-joined argument per coverage data tuple.
    parts = ['java -classpath %s:%s com.tencent.gdt.blade.ReportGenerator' %
             (coverage_reporter, jacoco_libs)]
    parts.append(report_dir)
    parts.extend(','.join(data) for data in coverage_data)
    cmd_str = ' '.join(parts)
    console.info('Generating java coverage report')
    console.info(cmd_str)
    if subprocess.call(cmd_str, shell=True):
        console.warning('Failed to generate java coverage report')
def protoc_plugin_flags(self):
    """Map each code-generation language to its protoc plugin flag."""
    protoc_plugin_config = config.get_section('protoc_plugin_config')
    # Each configured plugin contributes a flag per language it generates;
    # later plugins overwrite earlier ones for the same language.
    return {
        language:
            protoc_plugin_config[plugin].protoc_plugin_flag(self.build_path)
        for plugin in self.data['protoc_plugins']
        for language in protoc_plugin_config[plugin].code_generation
    }
def _generate_fbthrift_builders(self):
    """Emit the scons rule that sets up the fbthrift code generators."""
    fbthrift_config = config.get_section('fbthrift_config')
    incs = _incs_list_to_string(fbthrift_config['fbthrift_incs'])
    rule = ('scons_helper.setup_fbthrift_builders(top_env, "%s", '
            'fbthrift1_bin="%s", fbthrift2_bin="%s", fbthrift_incs_str="%s")'
            % (self.build_dir, fbthrift_config['fbthrift1'],
               fbthrift_config['fbthrift2'], incs))
    self._add_rule(rule)
def _generate_fast_link_builders(self):
    """Generates fast link builders if it is specified in blade bash."""
    link_config = config.get_section('link_config')
    if not link_config['link_on_tmp']:
        return
    enable_dccc = link_config['enable_dccc']
    # De Morgan of the original: (not dccc) or (dccc and not prepared).
    # Skip only when distcc is enabled AND its environment is prepared.
    if not (enable_dccc and self.build_environment.dccc_env_prepared):
        self._add_rule('scons_helper.setup_fast_link_builders(top_env)')
def __init__(self, bot):
    """Store the bot handle and load the currency-converter API key."""
    self.bot = bot
    # Currency table is fetched lazily; None until first use.
    self.currencies = None
    self.api_key = get_section("currency").get("api_key")
    if not self.api_key:
        # Fail fast at load time when the credential is missing.
        raise Exception(
            "Key 'api_key' not found or not set. To use this cog, get a key "
            "from https://www.currencyconverterapi.com/"
        )
class reactionranker(commands.Cog):
    """Reaction-role cog: reactions in the rank channel toggle rank roles."""

    least_role_needed = get_section("bot").get("admin_minimum_role")

    def __init__(self, bot):
        self.bot = bot

    def has_at_least_role(name):
        """Command check: author must hold a role >= the named role; no DMs."""
        def predicate(ctx):
            msg = ctx.message
            ch = msg.channel
            if type(ch) == DMChannel:
                return False
            role = utils.get(ctx.guild.roles, name=name)
            return any(x >= role for x in msg.author.roles)
        return commands.check(predicate)

    @commands.command(aliases=["addreactions"])
    @has_at_least_role(least_role_needed)
    async def addreact(self, ctx, messageid: int):
        """Adds base reactions to specified message (Admin only)"""
        if messageid is not None:
            msg = await ctx.fetch_message(id=messageid)
            for emojiId in get_section("bot").get("ranks"):
                await msg.add_reaction(emoji=str(emojiId))

    @commands.Cog.listener()
    async def on_raw_reaction_add(self, payload):
        """Adding a reaction removes the mapped rank role if held (toggle)."""
        if payload.channel_id != int(get_section("bot").get("rank_channel_id")):
            return
        discorduser = utils.get(Client.get_all_members(self.bot),
                                id=payload.user_id)
        # Fix: the original resolved the same role object twice (once for the
        # membership test, once for removal); resolve it once and reuse.
        role = utils.get(
            discorduser.guild.roles,
            id=int(get_section("bot").get("ranks").get(payload.emoji.name)))
        if role in discorduser.roles:
            await discorduser.remove_roles(role)

    @commands.Cog.listener()
    async def on_raw_reaction_remove(self, payload):
        """Removing a reaction re-grants the mapped rank role."""
        if payload.channel_id != int(get_section("bot").get("rank_channel_id")):
            return
        discorduser = utils.get(Client.get_all_members(self.bot),
                                id=payload.user_id)
        role = utils.get(
            discorduser.guild.roles,
            id=int(get_section("bot").get("ranks").get(payload.emoji.name)))
        await discorduser.add_roles(role)
def _protoc_plugin_rules(self):
    """Generate scons rules for each protoc plugin."""
    env_name = self._env_name()
    plugin_config = config.get_section('protoc_plugin_config')
    for plugin_name in self.data['protoc_plugins']:
        plugin = plugin_config[plugin_name]
        # One Append per language the plugin generates code for.
        for language in plugin.code_generation:
            self._write_rule(
                '%s.Append(PROTOC%sPLUGINFLAGS = "%s ")' %
                (env_name, language.upper(),
                 plugin.protoc_plugin_flag(self.build_path)))
def _generate_thrift_builders(self):
    """Generate thrift library builders."""
    thrift_config = config.get_section('thrift_config')
    thrift_incs_str = _incs_list_to_string(thrift_config['thrift_incs'])
    thrift_bin = thrift_config['thrift']
    # '//'-prefixed paths refer to a target label; rewrite it to the
    # corresponding path inside the build directory.
    if thrift_bin.startswith('//'):
        thrift_bin = thrift_bin.replace(
            '//', self.build_dir + '/').replace(':', '/')
    self._add_rule(
        'scons_helper.setup_thrift_builders(top_env, build_dir="%s", '
        'thrift_bin="%s", thrift_incs_str="%s")' % (
            self.build_dir, thrift_bin, thrift_incs_str))
def get_warning_flags(self):
    """Get the warning flags.

    Returns (cppflags, cxxflags, cflags) after dropping any flags the local
    compiler does not accept.
    """
    cc_config = config.get_section('cc_config')
    filtered_cppflags = self._filter_out_invalid_flags(
        cc_config['warnings'])
    filtered_cxxflags = self._filter_out_invalid_flags(
        cc_config['cxx_warnings'], 'c++')
    filtered_cflags = self._filter_out_invalid_flags(
        cc_config['c_warnings'], 'c')
    return (filtered_cppflags, filtered_cxxflags, filtered_cflags)
def generate_thrift_rules(self):
    """Emit the ninja rule that runs the thrift compiler for C++ output."""
    thrift_config = config.get_section('thrift_config')
    incs = _incs_list_to_string(thrift_config['thrift_incs'])
    compiler = thrift_config['thrift']
    # '//'-prefixed paths refer to a target label; rewrite it to the
    # corresponding path inside the build directory.
    if compiler.startswith('//'):
        compiler = compiler.replace(
            '//', self.build_dir + '/').replace(':', '/')
    command = ('%s --gen cpp:include_prefix,pure_enums '
               '-I . %s -I `dirname ${in}` '
               '-out %s/`dirname ${in}` ${in}' %
               (compiler, incs, self.build_dir))
    self.generate_rule(name='thrift',
                       command=command,
                       description='THRIFT ${in}')
def scalac_flags(self):
    """Collect scalac flags: target platform, then per-target warnings,
    then the globally configured warnings."""
    scala_config = config.get_section('scala_config')
    flags = []
    target_platform = scala_config['target_platform']
    if target_platform:
        flags.append('-target:%s' % target_platform)
    # Per-target warnings first, then global ones; skip falsy entries.
    for warning_opts in (self.data.get('warnings'),
                         scala_config['warnings']):
        if warning_opts:
            flags.append(warning_opts)
    return flags
def generate_java_scala_rules(self):
    """Emit the full set of java/scala ninja rules (javac, resources, jar,
    tests, fatjar, binary, scalac) in this fixed order."""
    java_config = config.get_section('java_config')
    self.generate_javac_rules(java_config)
    self.generate_java_resource_rules()
    # Resolve the jar tool from the configured JDK and pass it to the rule.
    jar = self.get_java_command(java_config, 'jar')
    args = '%s ${out} ${in}' % jar
    self.generate_rule(name='javajar',
                       command=self.generate_toolchain_command('java_jar',
                                                               suffix=args),
                       description='JAVA JAR ${out}')
    self.generate_java_test_rules()
    self.generate_rule(name='fatjar',
                       command=self.generate_toolchain_command('java_fatjar'),
                       description='FAT JAR ${out}')
    self.generate_java_binary_rules()
    self.generate_scala_rules(java_config)
def run_migrations_online():
    """Run migrations in 'online' mode.

    Builds an Engine from the alembic ini section and runs the migration
    scripts inside a transaction on a live connection.
    """
    ini_options = config.get_section(config.config_ini_section)
    engine = engine_from_config(ini_options,
                                prefix='sqlalchemy.',
                                poolclass=pool.NullPool)
    # The context manager closes the connection when migrations finish.
    with engine.connect() as connection:
        context.configure(connection=connection,
                          target_metadata=target_metadata)
        with context.begin_transaction():
            context.run_migrations()
def __init__(self, log_dir):
    """Init the maven artifact cache, creating *log_dir* if it is missing."""
    if not os.path.exists(log_dir):
        os.makedirs(log_dir)
    self.__log_dir = log_dir
    # Maps (id, classifier) -> MavenArtifact instance, where id is a jar id
    # in the 'group:artifact:version' format.
    self.__jar_database = {}
    java_config = config.get_section('java_config')
    self.__maven = java_config.get('maven')
    self.__central_repository = java_config.get('maven_central')
    # Local repository is set to the maven default directory and could not
    # be configured currently.
    self.__local_repository = os.path.expanduser('~/.m2/repository')
    self.__need_check_config = True
    # Record startup time; snapshot artifacts are re-downloaded daily.
    self.__build_time = time.time()
    self.__one_day_interval = 86400