def get_error_message(self):
    """
    Check the integrity of the instance's attributes.

    Return a description of the first invalid attribute found, or an
    empty string when everything is valid.
    """
    # Checks are ordered; only the first failing one is reported.
    if self.season.get_error_message(SEASON, "season") != "":
        msg = self.season.get_error_message(SEASON, "season")
    elif self.temperature < -273 or self.temperature > 10000:
        msg = "temperatura del clima errata: %d" % self.temperature
    elif self.wind_direction.get_error_message(DIR, "wind_direction") != "":
        msg = self.wind_direction.get_error_message(DIR, "wind_direction")
    elif self.cloud_color.get_error_message(COLOR, "cloud_color") != "":
        msg = self.cloud_color.get_error_message(COLOR, "cloud_color")
    # The attributes below are probabilities and must stay within 0..100.
    elif self.cloud < 0 or self.cloud > 100:
        msg = "probabilità di nuvolosità errata: %d" % self.cloud
    elif self.humidity < 0 or self.humidity > 100:
        msg = "probabilità di umidità errata: %d" % self.humidity
    elif self.fog < 0 or self.fog > 100:
        msg = "probabilità di nebbia errata: %d" % self.fog
    elif self.rain < 0 or self.rain > 100:
        msg = "probabilità di pioggia errata: %d" % self.rain
    elif self.hail < 0 or self.hail > 100:
        msg = "probabilità di grandine errata: %d" % self.hail
    elif self.snow < 0 or self.snow > 100:
        msg = "probabilità di nevicate errata: %d" % self.snow
    elif self.lightning < 0 or self.lightning > 100:
        msg = "probabilità dei tempeste con fulmini errata: %d" % self.lightning
    else:
        return ""
    # An invalid attribute was found: log it with the instance repr.
    log.error("(Meteo: repr %s) %s" % (repr(self), msg))
    return msg
def secret_yaml(self, config):
    """Update secret.yaml

    Migrates a version 0.0.1 secret config either to the new SQLite
    database (when the WALBOT_FEATURE_NEW_CONFIG feature flag is on) or
    to YAML schema version 0.0.2.
    """
    if config.version == "0.0.1":
        if FF.is_enabled("WALBOT_FEATURE_NEW_CONFIG") == "1":
            # Move the secret config from YAML to db/secret.db (SQLite).
            os.makedirs("db", exist_ok=True)
            sqlite3 = importlib.import_module("sqlite3")
            con = sqlite3.connect(os.path.join("db", "secret.db"))
            cur = con.cursor()
            cur.execute("CREATE TABLE db_info (key text, value text)")
            cur.execute("INSERT INTO db_info VALUES ('version', '0.1.0')")
            cur.execute("CREATE TABLE tokens (key text, value text)")
            cur.execute("INSERT INTO tokens VALUES ('discord', ?)", (config.token, ))
            con.commit()
            con.close()
            os.remove(self.config_name + '.yaml')
            # Fix: the message used to say "contig.yaml ... db/config.db",
            # but this function migrates the secret config to db/secret.db.
            log.info(f"Successfully migrated {self.config_name}.yaml to db/secret.db!")
            # Fix: the YAML file is gone now; without this return the
            # version check below would log a spurious "Unknown version".
            return
        else:
            config.__dict__["mail"] = {
                "smtp_server": None,
                "email": None,
                "password": None,
            }
            config.__dict__["admin_email_list"] = list()
            self._bump_version(config, "0.0.2")
    if config.version == "0.0.2":
        log.info(f"Version of {self.config_name} is up to date!")
    else:
        log.error(
            f"Unknown version {config.version} for {self.config_name}!")
async def on_message(self, message: discord.Message) -> None:
    """Entry point for every incoming Discord message.

    Applies a chain of guard filters (ignored guild, own messages, DMs,
    whitelist, user permission level, command prefix/mention) and, if the
    message passes all of them, replies with the configured bot response.
    """
    try:
        # NOTE(review): guild is dereferenced before the DMChannel check
        # below; for DMs this likely raises and is swallowed by the broad
        # except at the bottom -- confirm the intended order.
        if self.config.guilds[message.channel.guild.id].ignored:
            return
        log.info(str(message.author) + " -> " + message.content)
        # Never react to the bot's own messages.
        if message.author.id == self.user.id:
            return
        # Direct messages are not processed.
        if isinstance(message.channel, discord.DMChannel):
            return
        if message.channel.guild.id is None:
            return
        # Whitelisted guilds only accept messages from whitelisted channels.
        if self.config.guilds[message.channel.guild.id].is_whitelisted:
            if message.channel.id not in self.config.guilds[
                    message.channel.guild.id].whitelist:
                return
        # Lazily create a User record the first time an author is seen.
        if message.author.id not in self.config.users.keys():
            self.config.users[message.author.id] = User(message.author.id)
        # Ignore users below the minimal permission level.
        if self.config.users[
                message.author.
                id].permission_level < const.Permission.USER.value:
            return
        # Only react to command-prefixed messages or direct bot mentions.
        if not message.content.startswith(
                self.config.commands_prefix
        ) and not self.user.mentioned_in(message):
            return
        await message.channel.send(self.bot_response)
    except Exception:
        log.error("on_message failed", exc_info=True)
def markov_yaml(self, config):
    """Update markov.yaml

    Sequential schema migrations: each step upgrades the config by one
    version, so an old config file flows through every step up to the
    current version.
    """
    if config.version == "0.0.1":
        config.__dict__["min_chars"] = 1
        config.__dict__["min_words"] = 1
        self._bump_version(config, "0.0.2")
    if config.version == "0.0.2":
        config.__dict__["chains_generated"] = 0
        self._bump_version(config, "0.0.3")
    if config.version == "0.0.3":
        config.__dict__["max_chars"] = 2000
        config.__dict__["max_words"] = 500
        self._bump_version(config, "0.0.4")
    if config.version == "0.0.4":
        # The empty-string key is the chain's root node; give it a `word` field.
        config.model[""].__dict__["word"] = None
        self._bump_version(config, "0.0.5")
    if config.version == "0.0.5":
        # Recompile stored filter patterns with DOTALL so '.' matches newlines.
        for i, _ in enumerate(config.filters):
            config.__dict__["filters"][i] = re.compile(
                config.filters[i].pattern, re.DOTALL)
        self._bump_version(config, "0.0.6")
    if config.version == "0.0.6":
        config.__dict__["ignored_prefixes"] = dict()
        self._bump_version(config, "0.0.7")
    if config.version == "0.0.7":
        log.info(f"Version of {self.config_name} is up to date!")
    else:
        log.error(
            f"Unknown version {config.version} for {self.config_name}!")
def run(self, *args, **kwargs):
    """Run the bot's event loop until termination, then shut down cleanly.

    Slightly patched implementation from discord.py discord.Client (parent) class
    Reference: https://github.com/Rapptz/discord.py/blob/master/discord/client.py
    """
    loop = self.loop
    try:
        loop.add_signal_handler(signal.SIGINT, lambda: loop.stop())
        loop.add_signal_handler(signal.SIGTERM, lambda: loop.stop())
    except NotImplementedError:
        # Signal handlers are not available on all platforms (e.g. Windows).
        pass
    # Fix: was `*kwargs`, which unpacks only the keyword-argument *names*
    # as positional arguments; `**kwargs` forwards them as keywords.
    asyncio.ensure_future(self._bot_runner_task(*args, **kwargs), loop=loop)
    try:
        loop.run_forever()
    except KeyboardInterrupt:
        loop.stop()
        log.info('Received signal to terminate bot and event loop')
    log.info("Shutting down the bot...")
    # Cancel all outstanding tasks and wait for them to wind down.
    tasks = {t for t in asyncio.all_tasks(loop=loop) if not t.done()}
    for task in tasks:
        task.cancel()
    loop.run_until_complete(asyncio.gather(*tasks, return_exceptions=True))
    for task in tasks:
        if not task.cancelled():
            log.error("Asynchronous task cancel failed!")
    loop.run_until_complete(loop.shutdown_asyncgens())
    loop.run_until_complete(self._on_shutdown())
    loop.close()
    log.info("Bot is shut down!")
def stop(self, _, main_bot=True):
    """Stop a running bot instance identified by its cache file.

    Reads the pid from the bot cache, interrupts the process (SIGINT on
    POSIX, a console Ctrl+C event on Windows), waits until it exits, and
    removes the cache file.
    """
    if not BotCache(main_bot).exists():
        return log.error(
            "Could not stop the bot (cache file does not exist)")
    bot_cache = BotCache(main_bot).parse()
    pid = bot_cache["pid"]
    if pid is None:
        return log.error(
            "Could not stop the bot (cache file does not contain pid)")
    if psutil.pid_exists(pid):
        if sys.platform == "win32":
            # Windows cannot SIGINT another process; attach to the
            # target's console and emit a Ctrl+C event instead.
            # Reference to the original solution:
            # https://stackoverflow.com/a/64357453
            import ctypes
            kernel = ctypes.windll.kernel32
            kernel.FreeConsole()
            kernel.AttachConsole(pid)
            kernel.SetConsoleCtrlHandler(None, 1)
            kernel.GenerateConsoleCtrlEvent(0, 0)
        else:
            os.kill(pid, signal.SIGINT)
        # Poll until the process is actually gone.
        while psutil.pid_exists(pid):
            log.debug("Bot is still running. Please, wait...")
            time.sleep(0.5)
        log.info("Bot is stopped!")
    else:
        log.error("Could not stop the bot (bot is not running)")
    BotCache(main_bot).remove()
async def send_command(self, plugin_name: str, command_name: str, *args, **kwargs) -> Any:
    """Dispatch a single command to one registered plugin by name.

    Unknown plugin or command names are logged and ignored. The command
    is only delivered when the plugin is enabled, except "init", which is
    always delivered.
    """
    if plugin_name not in self._plugins:
        return log.error(f"Unknown plugin '{plugin_name}'")
    if command_name not in self._plugin_functions_interface:
        return log.error(f"Unknown command '{command_name}' for plugin")
    plugin = self._plugins[plugin_name]
    if await plugin.is_enabled() or command_name == "init":
        handler = getattr(plugin, command_name)
        return await handler(*args, **kwargs)
def __init__(self, *args, **kwargs):
    """Build and log the error payload for a missing required field."""
    message = f"O campo '{args[0]}' é obrigatório."
    self.data = {'detail': message, 'code': FIELD_IS_REQUIRED}
    log.error(message)
    super().__init__(*args, **kwargs)
def __init__(self, *args, **kwargs):
    """Build and log the error payload for an invalid noticia ID."""
    message = f"O valor '{args[0]}' é um ID inválido."
    self.data = {'detail': message, 'code': INVALID_NOTICIA_ID}
    log.error(message)
    super().__init__(*args, **kwargs)
def __init__(self, secret_config: SecretConfig) -> None:
    """Cache mail credentials from the secret config, if fully specified.

    Leaves `self.secrets` as None (disabling the service) when any mail
    field is missing, and logs the problem.
    """
    self.secrets = None
    if all(secret_config.mail.values()):
        self.secrets = secret_config.mail
    else:
        log.error(
            "Email service cannot be initialized. Fill all mail fields in secret.yaml"
        )
def __init__(self, *args, **kwargs):
    """Build and log the error payload for a field containing numeric values."""
    message = f"O campo '{args[0]}' não deve possuir valores numéricos."
    self.data = {'detail': message, 'code': FIELD_NOT_SHOULD_HAS_INT_VALUE}
    log.error(message)
    super().__init__(*args, **kwargs)
def __init__(self, *args, **kwargs):
    """Build and log the error payload for a structurally invalid request body."""
    message = ("Os dados enviados possuem um estrutura inválida. "
               "Os campos esperados são: 'titulo', 'texto' e 'auto.nome'.")
    self.data = {'detail': message, 'code': INVALID_STRUCTURE}
    log.error(message)
    super().__init__(*args, **kwargs)
def check_privileges():
    """Checks if KISS was run as root, logging an error and leaving if not."""
    # Only enforced on Linux; other platforms are left alone.
    if platform != "linux":
        return
    if os.getuid() == 0:
        return
    log.error(
        None,
        "Admin privileges are required for actions such as sniffing and sending packets. Did you run KISS as root?"
    )
    sys.exit()
def __init__(self) -> None:
    """Capture expected config versions and locate the walbot git repository."""
    # Expected versions for the main, markov and secret configs; used
    # by version checks after an update.
    self.config_version = const.CONFIG_VERSION
    self.markov_version = const.MARKOV_CONFIG_VERSION
    self.secret_version = const.SECRET_CONFIG_VERSION
    try:
        self.repo = git.Repo(search_parent_directories=True)
    except git.exc.InvalidGitRepositoryError:
        # NOTE(review): self.repo stays unset on this path; consumers of
        # this context must tolerate the missing attribute -- confirm.
        log.error(
            "Failed to find walbot git repo. Autoupdate function is available only for git repository"
        )
def read_config_file(path: str) -> Any:
    """Read YAML configuration file

    Returns the parsed document, or None when the file is missing or
    cannot be parsed (the parse failure is logged).
    """
    yaml_loader, _ = Util.get_yaml()
    if not os.path.isfile(path):
        return None
    with open(path, 'r') as f:
        try:
            content = f.read()
            return yaml.load(content, Loader=yaml_loader)
        except Exception:
            log.error(f"File '{path}' can not be read!", exc_info=True)
            return None
def connect(self) -> None:
    """Open the MongoDB connection and bind the markov collection."""
    # NOTE(review): serverSelectionTimeoutMS=10 means 10 *milliseconds*,
    # which is unusually low -- confirm this is intentional.
    self._db_client = pymongo.MongoClient(self.url,
                                          serverSelectionTimeoutMS=10,
                                          connectTimeoutMS=20000)
    try:
        info = self._db_client.server_info()
        log.debug(f"Mongo connection initialized: {info['version']}")
    except ServerSelectionTimeoutError as e:
        # Connection failure is only logged; the handles below are still
        # bound, so later operations may fail at use time.
        log.error(f"Mongo connection failed: {e}")
    self._db = self._db_client[self._db_name]
    self.markov = self._db["markov"]
def get_next_event_delta(self):
    """Return the time delta until the next repetition of this event.

    Supports "minutes", "months" and "years" interval measures; an
    unknown measure is logged and yields a zero delta.
    """
    measure = self.repeat_interval_measure
    if measure == "minutes":
        return datetime.timedelta(minutes=self.repeat_after)
    if measure == "months":
        return dateutil.relativedelta.relativedelta(months=self.repeat_after)
    if measure == "years":
        return dateutil.relativedelta.relativedelta(years=self.repeat_after)
    log.error(
        f"Unknown repeat_interval_measure: {self.repeat_interval_measure}")
    return datetime.timedelta(minutes=0)
def update(self):
    """Perform update

    Loads the YAML config (or falls back to the DB-based updater when the
    new-config feature flag is on) and runs the matching `<name>_yaml` /
    `<name>_db` migration method; saves the file if it was modified.
    """
    yaml_path = self.config_name + '.yaml'
    if os.path.isfile(yaml_path):  # .yaml file path
        config = Util.read_config_file(yaml_path)
        # Dispatch to the per-config updater, e.g. markov_yaml/secret_yaml.
        getattr(self, self.config_name + "_yaml")(config)
    else:
        if FF.is_enabled("WALBOT_FEATURE_NEW_CONFIG") == "1":
            getattr(self, self.config_name + "_db")()
        else:
            log.error(f"File '{self.config_name}.yaml' does not exist")
            sys.exit(const.ExitStatus.CONFIG_FILE_ERROR)
    # NOTE(review): if the DB branch above was taken, `config` is unbound
    # here, so saving would raise NameError -- confirm that `modified` can
    # only be set on the YAML path.
    if self.modified:
        self._save_yaml_file(yaml_path, config)
def check_version(name, actual, expected, solutions=None, fatal=True):
    """Compare an actual component version against the expected one.

    Returns True on a match. On mismatch, logs at error (fatal) or
    warning (non-fatal) level, lists possible solutions if provided,
    and returns True only for non-fatal mismatches.
    """
    if actual == expected:
        return True
    report = log.error if fatal else log.warning
    report(
        f"{name} versions mismatch. Expected: {expected}, but actual: {actual}"
    )
    if solutions:
        log.info("Possible solutions:")
        for solution in solutions:
            log.info(f" - {solution}")
    return not fatal
async def broadcast_command(self, command_name: str, *args, **kwargs) -> None:
    """Broadcast command for all plugins

    Unknown command names are logged and ignored. Each plugin receives
    the command only while enabled, except "init", which is always sent.
    """
    if command_name not in self._plugin_functions_interface:
        return log.error(f"Unknown command '{command_name}' for plugin")
    for plugin in self._plugins.values():
        if await plugin.is_enabled() or command_name == "init":
            handler = getattr(plugin, command_name)
            await handler(*args, **kwargs)
def check_updates(context: AutoUpdateContext) -> bool:
    """Function that performs updates check. It is called periodically

    NOTE(review): the early-exit paths return the result of a log call,
    which is None (falsy), not an explicit bool -- confirm callers only
    test truthiness.
    """
    old_sha = context.repo.head.object.hexsha
    try:
        context.repo.remotes.origin.fetch()
    except Exception as e:
        return log.error(
            f"Fetch failed: {e}. Skipping this cycle, will try to update on the next one"
        )
    new_sha = context.repo.remotes.origin.refs['master'].object.name_rev.split()[0]
    log.debug(f"{old_sha} {new_sha}")
    if old_sha == new_sha:
        return log.debug("No new updates")
    # Consult the bot cache to see whether auto-update is allowed right now.
    bot_cache = importlib.import_module("src.bot_cache").BotCache(True).parse()
    if bot_cache is None:
        return log.warning(
            "Could not read bot cache. Skipping this cycle, will try to update on the next one"
        )
    if "do_not_update" not in bot_cache.keys():
        return log.warning(
            "Could not find 'do_not_update' field in bot cache. "
            "Skipping this cycle, will try to update on the next one")
    if bot_cache["do_not_update"]:
        return log.debug(
            "Automatic update is not permitted. Skipping this cycle, will try to update on the next one"
        )
    # Discard local changes and pull the new revision.
    context.repo.git.reset("--hard")
    try:
        g = git.cmd.Git(os.getcwd())
        g.pull()
    except git.exc.GitCommandError as e:
        # Transient network errors are tolerated; anything else propagates.
        if "Connection timed out" in e.stderr or "Could not resolve host" in e.stderr:
            log.warning(f"{e.command}: {e.stderr}")
        else:
            raise e
    subprocess.call(f"{sys.executable} -m pip install -r requirements.txt",
                    shell=True)
    # Hand over to a minimal placeholder bot while the main bot restarts.
    minibot_response = "WalBot automatic update is in progress. Please, wait..."
    subprocess.call(
        f"{sys.executable} walbot.py startmini --message '{minibot_response}' --nohup &",
        shell=True)
    subprocess.call(f"{sys.executable} walbot.py stop", shell=True)
    if context.check_versions():
        subprocess.call(f"{sys.executable} walbot.py patch", shell=True)
    subprocess.call(f"{sys.executable} walbot.py start --fast_start --nohup &",
                    shell=True)
    # Wait until the restarted bot reports ready, then stop the mini bot.
    while True:
        time.sleep(1)
        bot_cache = importlib.import_module("src.bot_cache").BotCache(
            True).parse()
        if bot_cache is not None and bot_cache["ready"]:
            subprocess.call(f"{sys.executable} walbot.py stopmini", shell=True)
            log.info("Bot is fully loaded. MiniWalBot is stopped.")
            break
        log.debug("Bot is not fully loaded yet. Waiting...")
    return True
def send(self, addrs: List[str], subject: str, message: str) -> None:
    """Send a plain-text e-mail to `addrs` via the configured SMTP account.

    No-op when mail credentials were not configured. Failures are logged,
    never raised.
    """
    if not self.secrets:
        return
    try:
        self.connect()
        # Envelope recipients also include the sender's own address, so
        # the bot keeps a copy (it is not listed in the To: header).
        all_addrs = addrs.copy()
        all_addrs.append(self.secrets["email"])
        result = (f"From: WalBot <{self.secrets['email']}>\n"
                  f"To: {', '.join(addrs)}\n"
                  f"Subject: {subject}\n"
                  "\n" + message)
        self._server.sendmail(from_addr=self.secrets["email"],
                              to_addrs=all_addrs,
                              msg=result.encode("utf-8"))
        log.info(f"Sent message:\n'''\n{result}'''")
        self.disconnect()
    except Exception as e:
        log.error(f"Send e-mail failed: {e}", exc_info=True)
def get_error_message(self):
    """
    Return an error message if the area structure contains anomalies,
    otherwise return an empty string when everything is correct.
    """
    # Checks are ordered; only the first failing one is reported.
    if not self.code:
        msg = "code dell'area non valido"
    elif not self.code.islower():
        msg = "code dell'area per convenzione deve essere scritto in minuscolo: %s" % self.code
    elif not self.short:
        msg = "short dell'area non valido: %r" % self.short
    elif not self.name:
        msg = "name dell'area non valido: %r" % self.name
    elif not self.descr:
        msg = "descr dell'area non valida: %r" % self.descr
    elif self.get_error_message_creators() != "":
        msg = self.get_error_message_creators()
    elif self.level <= 0:
        msg = "Il livello di un'area non può essere uguale a 0 o negativo: %d" % self.level
    # elif self.races.get_error_message(RACE, "races") != "":
    #     msg = self.races.get_error_message(RACE, "races")
    elif self.flags.get_error_message(AREA, "flags") != "":
        msg = self.flags.get_error_message(AREA, "flags")
    elif self.color.get_error_message(COLOR, "color") != "":
        msg = self.color.get_error_message(COLOR, "color")
    elif self.get_error_message_climates() != "":
        msg = self.get_error_message_climates()
    elif self.maze and self.maze.get_error_message() != "":
        msg = self.maze.get_error_message()
    elif self.wumpus and self.wumpus.get_error_message() != "":
        msg = self.wumpus.get_error_message()
    elif self.repop_time < config.min_repop_time or self.repop_time > config.max_repop_time:
        msg = "il repop time deve essere compreso tra %d e %d minuti invece è: %d" % (
            config.min_repop_time, config.max_repop_time, self.repop_time)
    elif not self.landfill_code and not self.wild:
        msg = "landfill_code dell'area non valido: %r" % self.landfill_code
    elif self.get_error_message_resets() != "":
        msg = self.get_error_message_resets()
    else:
        return ""
    # If execution reaches this point there is an error message to report.
    log.error("(Area: %s) %s" % (self.code, msg))
    return msg
def dependencias():
    """Checks KISS dependencies: scapy 2.4.0+ and custom Scapy files.

    If something missing, logs and exits.
    """
    # Read scapy's sendrecv.py to verify the KISS-patched version is in place.
    # (context manager replaces the manual open/read/close sequence)
    with open(scapy.__path__[0] + "/sendrecv.py") as f:
        content = f.read()
    # NOTE(review): lexicographic version comparison misorders versions
    # such as "2.10.0" -- confirm acceptable while scapy stays below 2.10.
    if scapy.VERSION < "2.4.0":
        # Fix: the original message lacked a space ("...2.3.xis not supported").
        log.error(
            None, "Scapy " + scapy.VERSION +
            " is not supported. Please download Scapy +2.4.0.")
        sys.exit()
    if "Klecko" not in content:
        log.error(
            None,
            "Your sendrecv.py Scapy file is not KISS custom file. Please move files in scapy_files folder to the Scapy directory."
        )
        sys.exit()
def backup(self, *files):
    """Create a timestamped .zip backup of each given file.

    Each backup is written to <dir of file>/backup/<name>_<timestamp><ext>.zip.
    Failures are logged per file and do not abort the remaining backups.
    """
    compress_type = zipfile.ZIP_DEFLATED if self.saving["backup"][
        "compress"] else zipfile.ZIP_STORED
    for file in files:
        path = os.path.dirname(file)
        name, ext = os.path.splitext(os.path.basename(file))
        name += "_" + datetime.datetime.now().strftime("%Y-%m-%d_%H-%M-%S")
        backup_file = name + ext
        backup_dir = os.path.join(path, "backup")
        backup_archive = os.path.join(backup_dir, name + ext + ".zip")
        # Fix: the archive is written under <path>/backup, but the original
        # checked/created a "backup" directory relative to the CWD instead,
        # so archives outside the CWD failed with a missing directory.
        os.makedirs(backup_dir, exist_ok=True)
        try:
            with zipfile.ZipFile(backup_archive, mode='w') as zf:
                zf.write(file,
                         arcname=backup_file,
                         compress_type=compress_type)
        except Exception as e:
            log.error(
                f"Unable to create backup {file} -> {backup_file}: {e}")
        else:
            log.info(f"Created backup for {file}: {backup_file}")
def register(self, reload: bool = False) -> None:
    """Find plugins in plugins directory and register them

    Scans src/plugins (and src/plugins/private recursively) for .py files,
    imports each module, and registers the single BasePlugin subclass each
    module is expected to contain.
    """
    plugin_directory = os.path.join(os.path.dirname(__file__), "plugins")
    plugin_modules = ['src.plugins.' + os.path.splitext(path)[0]
                      for path in os.listdir(plugin_directory)
                      if os.path.isfile(os.path.join(plugin_directory, path)) and path.endswith(".py")]
    private_plugin_directory = os.path.join(os.path.dirname(__file__), "plugins", "private")
    plugin_modules += [Util.path_to_module(
        f"src.plugins.private.{os.path.relpath(path, private_plugin_directory)}."
        f"{os.path.splitext(file)[0]}")
        for path, _, files in os.walk(private_plugin_directory)
        for file in files
        if os.path.isfile(os.path.join(private_plugin_directory, path, file)) and file.endswith(".py")]
    importlib.invalidate_caches()
    for module in plugin_modules:
        log.debug2(f"Processing plugins from module: {module}")
        plugins_file = importlib.import_module(module)
        if reload:
            importlib.reload(plugins_file)
        plugins = [obj[1] for obj in inspect.getmembers(plugins_file, inspect.isclass)
                   if (obj[1].__module__ == module) and issubclass(obj[1], BasePlugin)]
        if len(plugins) == 1:
            plugin = plugins[0]
            # Public methods the plugin class actually implements.
            actual_functions_list = [
                func[0] for func in inspect.getmembers(plugin, inspect.isfunction)
                if not func[0].startswith('_')
            ]
            if all(x in actual_functions_list for x in self._plugin_functions_interface):
                p = plugin()
                self._plugins[p.get_classname()] = p
                log.debug(f"Registered plugin '{p.get_classname()}'")
            else:
                # Fix: this branch previously referenced `p`, which is only
                # bound in the success branch (NameError at runtime), and the
                # message was missing the word "not".
                log.error(f"Class '{plugin.__name__}' does not comply with BasePlugin interface")
        elif len(plugins) > 1:
            log.error(f"Module '{module}' have more than 1 class in it")
        else:
            log.error(f"Module '{module}' have no classes in it")
async def wrapped(*args, **kwargs):
    # Decorator-produced wrapper: run `func` and, on any exception,
    # e-mail the admin list (if configured) with the traceback plus bot
    # details, then log the failure.
    # NOTE(review): the exception is swallowed -- the wrapper returns
    # None on failure; confirm callers expect that.
    try:
        return await func(*args, **kwargs)
    except Exception as e:
        # Best effort: attach full bot info, fall back to a placeholder.
        try:
            bot_info = bc.info.get_full_info(2)
        except Exception:
            log.warning("Failed to get bot info to attach to e-mail", exc_info=True)
            bot_info = "ERROR: Failed to retrieve details, please refer to log file"
        if bc.secret_config.admin_email_list:
            mail = Mail(bc.secret_config)
            mail.send(
                bc.secret_config.admin_email_list,
                f"WalBot (instance: {bc.instance_name}) {func.__name__} failed",
                f"{func.__name__} failed:\n"
                f"{e}\n"
                "\n"
                f"Backtrace:\n"
                f"{traceback.format_exc()}\n"
                f"Details:\n"
                f"{bot_info}")
        log.error(f"{func.__name__} failed", exc_info=True)
def _send_spoofed_packet(self, real_packet):
    """Creates a spoofed http packet with injected code and sends it.

    Parameters:
        real_packet (scapy.packet.Packet): original packet.
    """
    try:
        spoof_load = Spoofed_HTTP_Load(real_packet.load, self.injected_code)
    except ForwardPacketPlease as err:
        # The payload could not be spoofed (e.g. broken/empty gzip);
        # forward the original packet untouched instead.
        if err.args[0] == "Empty gzipped packet":
            log.js.warning("gzipped_empty_packet")
        elif err.args[
                0] == "Compressed file ended before the end-of-stream marker was reached":
            log.js.warning("gzipped_uncomplete_packet")
        self._forward_http_packet(real_packet)
        return
    except Exception as err:
        # Unknown failure: log it and fall back to forwarding the original.
        log.error("js", "Unexpected error creating spoofed http load:",
                  type(err), err, ". Original packet length:",
                  len(real_packet))
        self._forward_http_packet(real_packet)
        #raise
        return
    # Rebuild the IP/TCP headers from the original packet so the spoofed
    # payload slots into the existing TCP stream (same seq/ack/ports).
    spoof_packet = IP(src=real_packet["IP"].src, dst=real_packet["IP"].dst, flags=real_packet["IP"].flags)/ \
        TCP(sport=real_packet["TCP"].sport, dport=real_packet["TCP"].dport, seq=real_packet["TCP"].seq, ack=real_packet["TCP"].ack, flags=real_packet["TCP"].flags)/ \
        Raw(load=spoof_load)
    send(spoof_packet, verbose=0)
    self._add_handled_packet(spoof_packet)
    log.js.info("packet_handled",
                len_spoof_load=len(spoof_load),
                len_real_load=len(real_packet.load))
def dump_autocomplete_script(shell, parser):
    """Generate and write the shell autocompletion script for walbot.

    Only bash is supported; requires the optional `shtab` package.
    """
    if shell != "bash":
        log.error("Unsupported shell type")
        return
    try:
        shtab = importlib.import_module("shtab")
    except ImportError:
        log.error("Shell autocompletion scripts update failed.")
        log.error(
            f" Install `shtab`: {sys.executable} -m pip install shtab")
        return
    script = shtab.complete(parser, shell="bash").replace("walbot.py", "./walbot.py")
    target = os.path.join(os.getcwd(), "tools", "autocomplete",
                          "walbot-completion.bash")
    with open(target, "w") as f:
        print(script, file=f)
def save(self, config_file, markov_file, secret_config_file, wait=False):
    """Serialize config, secret config and Markov data to YAML files.

    NOTE(review): each lock below is created locally per call, so it can
    never exclude concurrent savers -- the locks would need to be shared
    (module- or instance-level) to have any effect; confirm intent.
    """
    config_mutex = threading.Lock()
    with config_mutex:
        log.info("Saving of config is started")
        with open(config_file, 'wb') as f:
            try:
                f.write(
                    yaml.dump(self,
                              Dumper=bc.yaml_dumper,
                              encoding='utf-8',
                              allow_unicode=True))
                log.info("Saving of config is finished")
            except Exception:
                log.error("yaml.dump failed", exc_info=True)
    secret_config_mutex = threading.Lock()
    with secret_config_mutex:
        log.info("Saving of secret config is started")
        with open(secret_config_file, 'wb') as f:
            try:
                f.write(
                    yaml.dump(bc.secret_config,
                              Dumper=bc.yaml_dumper,
                              encoding='utf-8',
                              allow_unicode=True))
                log.info("Saving of secret config is finished")
            except Exception:
                log.error("yaml.dump failed", exc_info=True)
    # Skip Markov serialization while the bot is in a voice channel.
    if bc.do_not_update[DoNotUpdateFlag.VOICE]:
        return log.info(
            "Markov module save is skipped since bot is in voice channel")
    markov_mutex = threading.Lock()
    with markov_mutex:
        log.info("Saving of Markov module data is started")
        try:
            # Markov data can be large, so serialize it on a worker thread;
            # optionally block on completion when `wait` is set.
            thread = threading.Thread(target=bc.markov.serialize,
                                      args=(markov_file, bc.yaml_dumper))
            thread.start()
            if wait:
                log.info("Waiting for saving of Markov module data...")
                thread.join()
                log.info("Saving of Markov is waited")
        except Exception:
            log.error("Saving of Markov module data is failed", exc_info=True)