def init_funcs(bot):
    # Globals
    bot.globals = Object()
    bot.globals.on_ready_write = False
    bot.globals.already_ready = False
    bot.globals.command_errors = False
    bot.globals.cooldown_sent = {}
    bot.globals.command_spam = {}
    bot.globals.spam_sent = {}
    bot.globals.command_deleted_sent = {}

    # MySQL
    global cursor, engine, Session
    if bot.dev_mode:
        db = 'discord_dev'
    elif bot.self_bot:
        db = 'discord_self'
    else:
        db = 'discord'
    engine = create_engine(
        'mysql+pymysql://{0}:@localhost/{1}?charset=utf8mb4'.format(
            bot.shard_id if not bot.self_bot else '', db),
        encoding='utf8')
    session_factory = sessionmaker(bind=engine)
    Session = scoped_session(session_factory)
    bot.mysql = Object()
    bot.mysql.engine = engine
    cursor = bot.mysql.cursor = bot.get_cursor

    # Utils
    bot.pruned_messages = []
    funcs = Funcs(bot, cursor)
    bot.funcs = funcs
    bot.escape = funcs.escape
    bot.get_prefix = funcs.get_prefix
    bot.is_blacklisted = funcs.is_blacklisted
    bot.command_check = funcs.command_check
    bot.process_commands = funcs.process_commands
    bot.write_last_time = funcs.write_last_time
    bot.get_last_time = funcs.get_last_time
    bot.restart_program = funcs.restart_program
    bot.queue_message = funcs.queue_message
    bot.get_images = funcs.get_images
    bot.truncate = funcs.truncate
    bot.proxy_request = funcs.proxy_request
    bot.run_process = funcs.run_process
    bot.get_json = funcs.get_json
    bot.bytes_download = funcs.bytes_download
    bot.download = funcs.download
    bot.isimage = funcs.isimage
    bot.isgif = funcs.isgif
    bot.google_keys = funcs.google_keys
    bot.repl = funcs.repl
    bot.command_help = funcs.command_help
    bot.random = funcs.random
    bot.get_text = funcs.get_text

    # Paths
    global discord_path, files_path
    bot.path = Object()
    discord_path = bot.path.discord = funcs.discord_path
    files_path = bot.path.files = funcs.files_path

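# `Object` is not defined in this snippet; from its use above it is
# presumably a bare attribute container. A minimal sketch of what
# init_funcs assumes:
class Object(object):
    """Empty namespace whose attributes are attached at runtime."""
    pass
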
def __init__(self):
    self.token = os.getenv('bot_token')
    self.funcs = Funcs()
    super().__init__(command_prefix=call_prefix, description="ArchBot")
    for ext in extensions:
        try:
            self.load_extension(ext)
            print(f'Added cog: {ext}')
        except Exception:
            print(f'Failed to load extension: {ext}.', file=sys.stderr)
            traceback.print_exc()

def __init__(self, bot):
    self.bot = bot
    self.db = Database(db_name)
    self.funcs = Funcs()

class LogFileParser(object):
    def __init__(self, file_name):
        globalVars = Globals()
        self._IO_DIR = globalVars.IO_DIR
        self.logger = FileLogger("logparserreader")
        self.file_name = file_name
        self.lambdaFuncs = Lambdas()
        self.utilFuncs = Funcs()

    def get_file_path(self):
        fp = os.path.join(self._IO_DIR, self.file_name)
        if os.path.exists(fp):
            return fp
        raise LogParserFileNotFound(self.file_name)

    def read_log(self):
        file_path = self.get_file_path()
        with open(file_path, "r") as fr:
            return fr.readlines()

    def parse_log(self):
        try:
            self.logger.info("Start Log Settings Parser")
            self.logger.debug("Read Log Settings File {0}".format(
                self.file_name))
            log_lines = self.read_log()
            self.logger.debug("Find Log File Pattern Sqoop / Bee")
            # Try the Sqoop pattern first, then Bee, then fall back to Hive.
            log_filtered_lines = list(
                filter(self.lambdaFuncs.fn_sqoop_filter_lines, log_lines))
            if len(log_filtered_lines) > 0:
                log_splitted_lines = list(
                    map(self.lambdaFuncs.fn_sqoop_splitter,
                        log_filtered_lines))
                return [self.utilFuncs.loglines2dict_sqoop(x)
                        for x in log_splitted_lines]
            log_filtered_lines = list(
                filter(self.lambdaFuncs.fn_bee_filter_lines, log_lines))
            if len(log_filtered_lines) > 2:
                log_splitted_lines = list(
                    map(self.lambdaFuncs.fn_bee_splitter, log_filtered_lines))
                return [self.utilFuncs.loglines2dict_bee(x)
                        for x in log_splitted_lines]
            log_filtered_lines = list(
                filter(self.lambdaFuncs.fn_hive_filter_lines, log_lines))
            log_splitted_lines = list(
                map(self.lambdaFuncs.fn_hive_splitter, log_filtered_lines))
            return [self.utilFuncs.loglines2dict_hive(x)
                    for x in log_splitted_lines]
        except Exception as e:
            # Format the exception explicitly: "..." + e raises a TypeError.
            self.logger.error("Config File Parsing Exception: {0}".format(e))
            raise

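# Minimal usage sketch for LogFileParser. The file name below is
# hypothetical; the file is expected under Globals().IO_DIR:
parser = LogFileParser("sqoop_job.log")
try:
    records = parser.parse_log()  # list of dicts, one per matched log line
    print("{0} records parsed".format(len(records)))
except LogParserFileNotFound as e:
    print("log file missing: {0}".format(e))
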
class SqoopGenerator(object):
    def __init__(self, job_name):
        self.job_name = job_name
        self.file_ext = "txt"
        self.sqoop_stmt = ""
        self.utilFuncs = Funcs()

    def generate(self):
        genTemplates = GeneratorTemplate()
        # TEMPLATE value in the generator file -> (template factory, file extension).
        templates = {
            "BASIC_IMP_CMD": (genTemplates.basic_import_command, "sh"),
            "BASIC_IMP_OPT": (genTemplates.basic_import_option_noinit, "txt"),
            "BASIC_IMP_OPT_INIT": (genTemplates.basic_import_option, "txt"),
            "SPLIT_IMP_CMD": (genTemplates.split_import_command, "sh"),
            "SPLIT_IMP_OPT": (genTemplates.split_import_option_noinit, "txt"),
            "SPLIT_IMP_OPT_INIT": (genTemplates.split_import_option, "txt"),
            "DIRECT_IMP_CMD": (genTemplates.direct_import_command, "sh"),
            "DIRECT_IMP_OPT": (genTemplates.direct_import_option_noinit, "txt"),
            "DIRECT_IMP_OPT_INIT": (genTemplates.direct_import_option, "txt"),
            "BASIC_EXP_CMD": (genTemplates.basic_export_command, "sh"),
        }
        default = (genTemplates.basic_import_command, "sh")
        try:
            genParser = GeneratorParser()
            obj_data = genParser.parse_generator(self.job_name)
            sqoop_args = self.fill_args(obj_data)
            if self.job_name == "__INIT__":
                template_fn, self.file_ext = genTemplates.basic_import_init, "txt"
            elif obj_data is None:
                template_fn, self.file_ext = default
            else:
                template_key = self.utilFuncs.get_generator_value(
                    obj_data, "TEMPLATE")[0]
                template_fn, self.file_ext = templates.get(template_key, default)
            self.sqoop_stmt = Template(template_fn()).substitute(sqoop_args)
        except (GeneratorFileNoContent, GeneratorFileNotFound):
            # Fall back to a plain import command; sqoop_args must already
            # have been built for this to succeed.
            template_fn, self.file_ext = default
            self.sqoop_stmt = Template(template_fn()).substitute(sqoop_args)
        finally:
            genWriter = GeneratorWriter()
            genWriter.write2file(self.job_name, self.file_ext, self.sqoop_stmt)
        return self.sqoop_stmt

    def fill_args(self, obj_data):
        confParser = ConfigParser()
        sqoop_args = dict()
        sqoop_args["connstr"] = confParser.get_config_value("sqoop-conn-str")
        sqoop_args["username"] = confParser.get_config_value("db-user")
        sqoop_args["pwdfile"] = confParser.get_config_value("db-pass-file")
        sqoop_args["outdir"] = confParser.get_config_value("sqoop-out-dir")
        sqoop_args["tablename"] = self.job_name
        sqoop_args["targetdir"] = confParser.get_config_value(
            "hdfs-root-path") + "/" + self.job_name
        # Defaults, overridden below when the generator file provides values.
        sqoop_args["fetchsize"] = 1000
        sqoop_args["nmappers"] = 20
        sqoop_args["splitcol"] = "ID"
        if obj_data:
            hdfs = self.utilFuncs.get_generator_value(obj_data, "HDFS")
            if len(hdfs) > 0:
                sqoop_args["targetdir"] = hdfs[0]
                sqoop_args["exportdir"] = hdfs[0]
            for gen_key, arg_key in (("FETCHSIZE", "fetchsize"),
                                     ("NMAP", "nmappers"),
                                     ("SPLITCOL", "splitcol")):
                values = self.utilFuncs.get_generator_value(obj_data, gen_key)
                if len(values) > 0:
                    sqoop_args[arg_key] = values[0]
        sqoop_args["mapredname"] = "sqoop_job_" + self.job_name
        return sqoop_args

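# Minimal usage sketch for SqoopGenerator. The job name is hypothetical; it
# doubles as the table name and the base name of the generated script:
generator = SqoopGenerator("CUSTOMERS")
sqoop_stmt = generator.generate()  # also written to CUSTOMERS.sh / CUSTOMERS.txt
print(sqoop_stmt)
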
class GeneratorParser(object):
    def __init__(self):
        globalVars = Globals()
        self.utilFuncs = Funcs()
        self.lambdaFuncs = Lambdas()
        config_parser = ConfigParser()
        self.logger = FileLogger("generatorreader")
        self.__ROOT_DIR, self.__RESOURCE_DIR = globalVars.ROOT_DIR, "resource"
        self.__GENERATOR_CONFIG_KEY = "generator-param"
        self.obj_sep = config_parser.get_config_value("generator-obj-sep")
        self.file_name = config_parser.get_config_value(
            self.__GENERATOR_CONFIG_KEY)

    def get_file_path(self, file_name):
        fp = os.path.join(self.__ROOT_DIR, self.__RESOURCE_DIR, file_name)
        if os.path.exists(fp):
            return fp
        raise GeneratorFileNotFound(file_name)

    def parse_generator(self, obj_name):
        try:
            self.logger.info("Start Generator Parser")
            file_path = self.get_file_path(self.file_name)
            self.logger.debug("Read Generator File {0}".format(self.file_name))
            with open(file_path, "r") as fr:
                raw_data = fr.readlines()
            self.logger.debug("Parse Generator File {0}".format(
                self.file_name))
            generator_objects = self.parse_lines(raw_data, self.obj_sep,
                                                 obj_name)
            self.logger.debug("Parse Completed {0}: File Content {1}".format(
                self.file_name, len(generator_objects)))
            return generator_objects
        except (GeneratorFileNotFound, GeneratorFileNoContent):
            # Swallow these so the caller falls back to a default template.
            return None
        except Exception as e:
            # Log before returning; format the exception explicitly since
            # "..." + e raises a TypeError.
            self.logger.error("Generator File Reading Error: {0}".format(e))
            return None

    def parse_lines(self, raw_lines, obj_sep, obj_name):
        file_lines = list(map(self.lambdaFuncs.fn_gen_splitter, raw_lines))
        filtered_lines = self.utilFuncs.map_generator_object(
            file_lines, obj_sep, obj_name)
        if len(filtered_lines) == 0:
            raise GeneratorFileNoContent(self.file_name)
        return filtered_lines

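# GeneratorParser is normally driven by SqoopGenerator, but it can be used
# standalone; parse_generator returns None when the generator file is missing
# or contains no matching object (the object name below is hypothetical):
gen_parser = GeneratorParser()
obj_data = gen_parser.parse_generator("CUSTOMERS")
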
# This snippet is truncated: the enclosing apod_task() coroutine, the fetch
# of apodData from the NASA APOD API, and the aiohttp request that yields
# `resp` are not shown. The image-branch header and the error send around
# 'Could not download file...' are reconstructed assumptions.
        if apodData['media_type'] == 'image':
            if resp.status != 200:
                return await channel.send('Could not download file...')
            data = io.BytesIO(await resp.read())  # create BytesIO instance
            await channel.send(":rocket:" + "\t" + "__**" +
                               apodData['title'] + "**__" + "\t" +
                               ":rocket:" + "\t" + "__**" +
                               apodData['date'] + "**__" + "\t" +
                               ":rocket:" + "\n")
            await channel.send(file=discord.File(data, 'apod.png'))
            await channel.send("```" + "\n" + apodData['explanation'] +
                               "\n" + "```")
        elif apodData['media_type'] == 'video':
            await channel.send(":rocket:" + "\t" + "__**" +
                               apodData['title'] + "**__" + "\t" +
                               ":rocket:" + "\t" + "__**" +
                               apodData['date'] + "**__" + "\t" +
                               ":rocket:" + "\n")
            # Turn the embed URL into a watchable YouTube link.
            await channel.send(apodData['url'].replace("?rel=0", "").replace(
                "embed/", "watch?v=") + "\n")
            await channel.send("```" + "\n" + apodData['explanation'] +
                               "\n" + "```")
        await asyncio.sleep(1 * (60 * 60))  # task to run once hourly


bot.add_cog(Music(bot))
bot.add_cog(Funcs(bot))
task = bot.loop.create_task(apod_task())
# token = os.environ.get("BOT_TOKEN")
bot.run(secret)

def __init__(self, bot):
    self.bot = bot
    self.db = Database(db_name)
    self.funcs = Funcs()
    # Playback-control reactions: previous, rewind, fast-forward, next, OK.
    self.reactions = ['⏮', '⏪', '⏩', '⏭', '🆗']

class ValidationParser(object):
    def __init__(self):
        globalVars = Globals()
        self.utilFunctions = Funcs()
        self.lambdaFuncs = Lambdas()
        self.__ROOT_DIR = globalVars.ROOT_DIR
        self.__RESOURCE_DIR = "resource"
        self.__VALIDATION_CONFIG_KEY = "validator-param"
        self.logger = FileLogger("validationreader")
        config_parser = ConfigParser()
        self.file_name = config_parser.get_config_value(
            self.__VALIDATION_CONFIG_KEY)
        self.obj_sep = config_parser.get_config_value("validator-sep")

    def get_file_path(self, file_name):
        fp = os.path.join(self.__ROOT_DIR, self.__RESOURCE_DIR, file_name)
        if os.path.exists(fp):
            return fp
        raise ValidationFileNotFound(file_name)

    def parse_validation(self):
        try:
            self.logger.info("Start Validation Parser")
            file_path = self.get_file_path(self.file_name)
            self.logger.debug("Read Validation File {0}".format(
                self.file_name))
            with open(file_path, "r") as fr:
                raw_data = fr.readlines()
            self.logger.debug("Parse Validation File {0}".format(
                self.file_name))
            validation_objects = self.parse_lines(raw_data, self.obj_sep)
            self.logger.debug("Parse Complete {0}: File Content {1}".format(
                self.file_name, len(validation_objects)))
            return validation_objects
        except (ValidationFileNotFound, ValidationNoContent):
            raise
        except Exception:
            self.logger.error("Validation File Reading Error")
            raise

    def parse_lines(self, raw_lines, line_sep):
        file_lines = list(map(self.lambdaFuncs.fn_valid_splitter, raw_lines))
        # The first line is the header; the rest are data rows.
        header = file_lines[0]
        file_lines = file_lines[1:]
        mapped_lines = self.utilFunctions.map_validation_object(
            file_lines, header)
        if len(mapped_lines) == 0:
            raise ValidationNoContent(self.file_name)
        return mapped_lines

    def find_validation_object(self, **kwargs):
        object_map_list = self.parse_validation()
        if len(kwargs) != 6:
            raise ValidationObjectArgError(len(kwargs))
        kwargs_str = self.utilFunctions.fn_dict_formatter(kwargs.items())
        db_object_list = []
        for o in object_map_list:
            obj_list = list(o.items())[:6]
            db_object_list.append(self.utilFunctions.fn_dict_formatter(
                obj_list))
        db_object_list = list(set(db_object_list))
        if kwargs_str not in db_object_list:
            raise ValidationObjectNotFound(kwargs_str)
        hive_obj = self.utilFunctions.fn_object_formatter(
            kwargs["HIVE_SCH"], kwargs["HIVE_TBL"], kwargs["HIVE_COL"])
        ora_obj = self.utilFunctions.fn_object_formatter(
            kwargs["ORA_SCH"], kwargs["ORA_TBL"], kwargs["ORA_COL"])
        return hive_obj, ora_obj

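# Minimal usage sketch for find_validation_object: exactly six keyword
# arguments are required (all values below are hypothetical):
validator = ValidationParser()
hive_obj, ora_obj = validator.find_validation_object(
    HIVE_SCH="dwh", HIVE_TBL="customers", HIVE_COL="id",
    ORA_SCH="APP", ORA_TBL="CUSTOMERS", ORA_COL="ID")
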