def __init__(self):
    """Initialise plugin state; real configuration happens in activate()."""
    BasePlugin.__init__(self)
    discord.Client.__init__(self)
    # Connection / configuration handles, populated by activate().
    # (The old code assigned self.token = None twice; once is enough.)
    self.token = None
    self.channel = None
    self.staff_channel = None
    self.client_id = None
    self.mock_connection = None
    self.prefix = None
    self.command_prefix = None
    self.dispatcher = None
    # Raw string: "\^" in a normal literal is an invalid escape sequence
    # (DeprecationWarning on modern Pythons); the pattern is unchanged.
    self.color_strip = re.compile(r"\^(.*?);")
    self.command_target = None
    self.sc = None
    self.irc_bot_exists = False
    self.irc = None
    self.chat_manager = None
    self.rank_roles = None
    self.discord_logger = None
    # Server commands that may be invoked from Discord.
    self.allowed_commands = ('who', 'help', 'uptime', 'motd', 'show_spawn',
                             'ban', 'unban', 'kick', 'list_bans', 'mute',
                             'unmute', 'set_motd', 'whois', 'broadcast',
                             'user', 'del_player', 'maintenance_mode',
                             'shutdown', 'save')
def activate(self):
    """Activate the plugin: read config, start the bot and wire up logging."""
    BasePlugin.activate(self)
    self.dispatcher = self.plugins.command_dispatcher
    self.irc_bot_exists = link_plugin_if_available(self, 'irc_bot')
    if self.irc_bot_exists:
        self.irc = self.plugins['irc_bot']
    # Fetch this plugin's config once instead of once per key.
    own_cfg = self.config.get_plugin_config(self.name)
    self.prefix = self.config.get_plugin_config(
        "command_dispatcher")["command_prefix"]
    self.command_prefix = own_cfg["command_prefix"]
    self.token = own_cfg["token"]
    self.client_id = own_cfg["client_id"]
    self.channel = own_cfg["channel"]
    self.staff_channel = own_cfg["staff_channel"]
    self.sc = own_cfg["strip_colors"]
    asyncio.ensure_future(self.start_bot())
    self.update_id(self.client_id)
    self.mock_connection = MockConnection(self)
    self.rank_roles = own_cfg["rank_roles"]
    if link_plugin_if_available(self, "chat_manager"):
        self.chat_manager = self.plugins['chat_manager']
    self.discord_logger = logging.getLogger("discord")
    self.discord_logger.setLevel(logging.INFO)
    # Guard against stacking duplicate handlers if activate() runs twice
    # (each extra StreamHandler would duplicate every log line).
    if not self.discord_logger.handlers:
        ch = logging.StreamHandler()
        ch.setFormatter(logging.Formatter(
            '%(asctime)s - %(levelname)s - %(name)s # %(message)s',
            datefmt='%Y-%m-%d %H:%M:%S'))
        self.discord_logger.addHandler(ch)
def activate(self):
    """Activate the plugin: load configuration, start the bot, set up logging."""
    BasePlugin.activate(self)
    self.dispatcher = self.plugins.command_dispatcher
    self.irc_bot_exists = link_plugin_if_available(self, 'irc_bot')
    if self.irc_bot_exists:
        self.irc = self.plugins['irc_bot']
    # Look up this plugin's configuration once rather than per key.
    cfg = self.config.get_plugin_config(self.name)
    self.prefix = self.config.get_plugin_config(
        "command_dispatcher")["command_prefix"]
    self.command_prefix = cfg["command_prefix"]
    self.token = cfg["token"]
    self.client_id = cfg["client_id"]
    self.channel = cfg["channel"]
    self.staff_channel = cfg["staff_channel"]
    self.sc = cfg["strip_colors"]
    asyncio.ensure_future(self.start_bot())
    self.update_id(self.client_id)
    self.mock_connection = MockConnection(self)
    self.rank_roles = cfg["rank_roles"]
    if link_plugin_if_available(self, "chat_manager"):
        self.chat_manager = self.plugins['chat_manager']
    self.discord_logger = logging.getLogger("discord")
    self.discord_logger.setLevel(logging.INFO)
    # Don't stack duplicate stream handlers across repeated activations.
    if not self.discord_logger.handlers:
        handler = logging.StreamHandler()
        handler.setFormatter(logging.Formatter(
            '%(asctime)s - %(levelname)s - %(name)s # %(message)s',
            datefmt='%Y-%m-%d %H:%M:%S'))
        self.discord_logger.addHandler(handler)
def __init__(self, application, name):
    """Set up configuration, the lyrics file and the argument parser."""
    # BasePlugin creates the references used below (conf, data_dir,
    # argparser).
    BasePlugin.__init__(self, application, name)

    # Configuration: number of rows allowed to print at once.  The
    # effective value is written back so the config file documents it.
    self.max_rows = self.conf.getint("max_rows", 5)
    self.conf["max_rows"] = str(self.max_rows)

    # The used lyrics live in a text file inside the plugin's data
    # directory; seed it with the default lyrics on first run.
    self.lyrics_file = os.path.join(self.data_dir, "lyrics.txt")
    if not os.path.exists(self.lyrics_file):
        with open(self.lyrics_file, "w") as lyrics_fp:
            lyrics_fp.write(_DEFAULT_LYRICS)

    # Command line interface.
    self.argparser.description = (
        "Demonstrates the implementation of a plugin. Inspired by the "
        "wordpress plugin \"Hello, Dolly\"."
    )
    self.argparser.epilog = "https://emsm.benediktschmitt.de/"
    self.argparser.add_argument(
        "--rows", "-r",
        action="store",
        dest="rows",
        type=int,
        default=1,
        metavar="ROWS",
        help="The number of lines that will be printed. "
    )
    return None
def __init__(self, application, name):
    """Initialise the plugin and build its config and CLI interface."""
    BasePlugin.__init__(self, application, name)
    # Configuration first, then the argument parser that exposes it.
    self.setup_conf()
    self.setup_argparser()
    return None
def __init__(self, project_base_dir=None):
    """Describe the docker-jenkins project, then defer to BasePlugin."""
    # Identity of this plugin and of the project directory it manages.
    self.name = "docker_jenkins"
    self.project_dir_name = "docker_jenkins"
    # Upstream repository the project is cloned from.
    self.git_repo = "https://github.com/sabhiram/docker-jenkins"
    # BasePlugin finishes the setup using the attributes defined above.
    BasePlugin.__init__(self, project_base_dir)
def __init__(self, app, name):
    """Wire up the init.d events and initialise config and argparser."""
    BasePlugin.__init__(self, app, name)
    # Events fired around service start/stop; the flags record whether
    # the corresponding event has already occurred.
    self.initd_start = self.app.events.get_event("initd_start")
    self.initd_stop = self.app.events.get_event("initd_stop")
    self.start_occured = False
    self.stop_occured = False
    # Configuration and command line interface.
    self.setup_conf()
    self.setup_argparser()
    return None
def __init__(self, app, name):
    """Set up the CLI for installing, removing and documenting plugins."""
    BasePlugin.__init__(self, app, name)

    # Argparser
    self.argparser.description = (
        "This plugin provides methods to install or remove plugins from "
        "this application.")
    self.argparser.add_argument(
        "-i", "--install",
        action="store",
        dest="install",
        metavar="ARCHIVE",
        help="Installs the plugin from the archive."
    )
    self.argparser.add_argument(
        "-r", "--remove",
        action="store",
        dest="remove",
        metavar="PLUGIN",
        choices=self.app.plugins.get_plugin_names(),
        help="Removes the plugin from the EMSM."
    )
    # NOTE: the long commented-out --update/--update-all options were dead
    # code and have been removed; recover them from version control if the
    # update feature is ever implemented.
    self.argparser.add_argument(
        "-d", "--doc",
        action="store",
        dest="print_doc",
        metavar="PLUGIN",
        choices=self.app.plugins.get_plugin_names(),
        help="Prints the docstring of the plugin."
    )
    return None
def __init__(self, app, name):
    """Read the guard configuration and normalise it back into self.conf."""
    BasePlugin.__init__(self, app, name)

    # Configuration
    # What to do when a world is offline; fall back to "none" on bad input.
    self.error_action = self.conf.get("error_action")
    if self.error_action not in ("none", "restart", "stop", "stderr"):
        self.error_action = "none"
    # Raw string: "\[" in a plain literal is an invalid escape sequence
    # (DeprecationWarning on modern Pythons); the default pattern itself
    # is unchanged.
    self.error_regex = self.conf.get("error_regex", r"(\[SEVERE\])")
    self.auto_run = self.conf.getboolean("auto_run", False)
    self.guard_all_worlds = self.conf.getboolean("guard_all_worlds", False)

    # Write the effective values back so the config file is
    # self-documenting.
    self.conf["error_action"] = self.error_action
    self.conf["error_regex"] = self.error_regex
    self.conf["auto_run"] = "yes" if self.auto_run else "no"
    self.conf["guard_all_worlds"] = "yes" if self.guard_all_worlds else "no"

    # Argparser
    self.argparser.description = (
        "Watches the logfiles and checks if the worlds are running smooth.")
    return None
def prepare_url_datadir_for_date(self, rundate_obj: datetime) -> tuple:
    """Prepare the URL and data directory for the given run date.

    The data for a run date is actually fetched for two days before it.

    :param rundate_obj: Date for the URL
    :return: Tuple of (URL prepared for that date, or None when no
        ``mainURLDateFormatted`` template is set; destination directory
        to save the data for this date).  The previous ``-> str``
        annotation was wrong — this method has always returned a 2-tuple.
    """
    url_prepared_for_date = None
    prev_day = scraper_utils.getPreviousDaysDate(rundate_obj)
    prev_to_prev_day = scraper_utils.getPreviousDaysDate(prev_day)
    # getattr() replaces the "'x' in dir(self) and self.x is not None"
    # anti-idiom: same outcome, one lookup.
    if getattr(self, 'mainURLDateFormatted', None) is not None:
        url_prepared_for_date = prev_to_prev_day.strftime(
            self.mainURLDateFormatted)
    data_dir_for_date = BasePlugin.identifyDataPathForRunDate(
        self.app_config.data_dir, prev_to_prev_day)
    return (url_prepared_for_date, data_dir_for_date)
def test_mapping_override_packets_dont_include_base_plugin(self):
    """A bare BasePlugin must not expose ``overridden_methods``."""
    plugin = BasePlugin()
    with self.assertRaises(AttributeError):
        plugin.overridden_methods
def __init__(self, *args, **kwargs):
    """Forward every constructor argument straight to BasePlugin."""
    BasePlugin.__init__(self, *args, **kwargs)
def test_plugins_total():
    """A freshly created BasePlugin counts exactly one plugin in total."""
    assert BasePlugin().plugins_total() == 1
def __init__(self, app):
    """Initialise the plugin by delegating to BasePlugin.

    Fix: the method was misspelled ``__init`` (missing the trailing
    underscores), so Python never invoked it as the constructor and
    ``BasePlugin.__init__`` never ran.
    """
    BasePlugin.__init__(self, app)
def __init__(self, config):
    """Construct the plugin, handing the configuration to BasePlugin."""
    BasePlugin.__init__(self, config)
def processAnnouncements(self, announceFileFullPath, archiveURL):
    """Process all company announcements to the exchange.

    Reads the announcements file line by line, filters out boilerplate
    records, extracts the entity name and company symbol from each
    record's prefix, and writes one NewsEvent article per announcement.

    :param announceFileFullPath: Path of the announcements text file.
    :param archiveURL: URL the announcements were downloaded from.
    """
    # Records containing any of these phrases are boilerplate and are
    # skipped (matching is done on the lower-cased record).
    skip_phrases = (
        "declaration of nav ",
        "recommended final dividend",
        "about investor presentation",
        "that the record date has been fixed on",
        "the maturity date of the scheme is",
        "suspension of trading",
        "postal ballot",
    )
    logger.debug("Processing announcements for file: %s",
                 announceFileFullPath)
    try:
        # Record prefix = "<entity name> <SYMBOL> :".  The '¿' occurs in
        # the raw exchange data and is kept in the character class as-is.
        recordPrefixPat = re.compile(
            r'([a-zA-Z0-9\(\)¿ \-.^:]{3,})([ ]+[a-zA-Z0-9\-]{2,})([ ]+:)')
        # 'with' closes the file; the old explicit fp.close() inside the
        # with-block was redundant and has been dropped.
        with open(announceFileFullPath, 'rt') as fp:
            fileRecords = fp.readlines()
        logger.debug("Read %s announcements.", len(fileRecords))
        for index, announceRecord in enumerate(fileRecords):
            try:
                # Lower-case once per record instead of once per phrase.
                lowered = announceRecord.lower()
                if (index > 0 and announceRecord.find(':') > -1 and
                        all(lowered.find(p) < 0 for p in skip_phrases)):
                    colon_pos = announceRecord.find(':')
                    recordPrefix = announceRecord[:colon_pos + 1]
                    announceContent = announceRecord[colon_pos + 1:]
                    # A following line without ':' is a continuation of
                    # this announcement's text.
                    if (index + 1 < len(fileRecords) and
                            fileRecords[index + 1].find(':') == -1):
                        announceContent = (announceContent + " " +
                                           fileRecords[index + 1])
                    searchRes = recordPrefixPat.search(recordPrefix)
                    if searchRes is not None:
                        entityName = searchRes.group(1)
                        companySymbol = searchRes.group(2).strip()
                        # Make an article for each announcement.
                        thisArticle = NewsEvent()
                        thisArticle.setPublishDate(self.app_config.rundate)
                        thisArticle.setModuleName(self.pluginName)
                        thisArticle.setIndustries([companySymbol])
                        thisArticle.setTitle('NSE Announcement for ' +
                                             entityName.strip())
                        thisArticle.setKeyWords([entityName.strip()])
                        thisArticle.setText(announceContent)
                        thisArticle.setURL(archiveURL)
                        # Unique ID per article: company symbol, limited
                        # to 10 characters.
                        articleUniqueID = str(companySymbol.strip()[:10])
                        thisArticle.setArticleID(articleUniqueID)
                        thisArticle.setSource('NSE')
                        filename = BasePlugin.makeUniqueFileName(
                            self.pluginName,
                            self.identifyDataPathForRunDate(
                                self.baseDirName,
                                thisArticle.getPublishDate()),
                            articleUniqueID,
                            URL=archiveURL)
                        thisArticle.writeFiles(filename,
                                               self.app_config.data_dir,
                                               announceContent)
                    else:
                        logger.debug(
                            "Skipping record %s as it is not properly"
                            " formatted.", index)
            except Exception as e:
                logger.error("Error processing NSE announcement no %s: %s",
                             index, e)
    except Exception as e:
        logger.error("Error processing NSE announcements: %s", e)
def fetchDataFromURL(self, uRLtoFetch, WorkerID):
    """Fetch the data archive from the given URL and persist it.

    Downloads the archive, writes it as a .zip under the per-date data
    directory, then fetches the master data and parses the archive.

    :param uRLtoFetch: URL to download the data archive from.
    :param WorkerID: Identifier of the worker (used for logging only).
    :return: An ExecutionResult with the download metrics, or None when
        the download failed or was not larger than minArticleLengthInChars.
    """
    self.pluginState = Types.STATE_FETCH_CONTENT
    fullPathName = ""
    dirPathName = ""
    rawData = ""
    sizeOfDataDownloaded = -1
    uncompressSize = 0
    publishDateStr = ""
    publishDate = None
    resultVal = None
    self.master_data_dir = self.app_config.master_data_dir
    # NOTE(review): .encode("ascii") raises on non-ASCII URLs — assumes
    # exchange URLs are always ASCII; confirm with the caller.
    logger.debug("Fetching %s, Worker ID %s",
                 uRLtoFetch.encode("ascii"), WorkerID)
    try:
        logging.captureWarnings(True)
        (publishDate, dataUniqueID) = self.extractUniqueIDFromURL(uRLtoFetch)
        rawData = self.downloadDataArchive(uRLtoFetch, type(self).__name__)
        publishDateStr = str(publishDate.strftime("%Y-%m-%d"))
        # Build the destination file name and paths for this publish date.
        fileNameWithOutExt = BasePlugin.makeUniqueFileName(
            self.pluginName,
            self.identifyDataPathForRunDate(self.baseDirName,
                                            publishDateStr),
            dataUniqueID,
            URL=uRLtoFetch)
        dirPathName = os.path.join(self.app_config.data_dir, publishDateStr)
        fullPathName = os.path.join(dirPathName, fileNameWithOutExt + ".zip")
        sizeOfDataDownloaded = len(rawData)
    except Exception as e:
        logger.error("Trying to fetch data from given URL: %s", e)
    if sizeOfDataDownloaded > self.minArticleLengthInChars:
        try:
            if os.path.isdir(dirPathName) is False:
                # since dir does not exist, so try creating it:
                os.mkdir(dirPathName)
        except Exception as theError:
            logger.error(
                "Error creating data directory '%s', Exception was: %s",
                dirPathName, theError)
        try:
            # 'with' closes the file; the old explicit fp.close() inside
            # the with-block was redundant and has been dropped.
            with open(fullPathName, 'wb') as fp:
                bytesWritten = fp.write(rawData)
                logger.debug("Wrote %s bytes to file: %s",
                             bytesWritten, fullPathName)
            # save master data:
            sizeOfDataDownloaded = self.fetchMasterData(
                uRLtoFetch, self.master_data_dir, WorkerID,
                sizeOfDataDownloaded)
            # save pledges data:
            # sizeOfDataDownloaded = sizeOfDataDownloaded + self.fetchPledgesData(self.master_data_dir, publishDate)
            uncompressSize = self.parseFetchedData2(
                str(publishDate.strftime("%Y%m%d")), fullPathName,
                dirPathName, WorkerID, uRLtoFetch)
        except Exception as theError:
            logger.error("Error saving downloaded data to zip file '%s': %s",
                         fullPathName, theError)
        # save metrics/count of downloaded data for the given URL
        resultVal = ExecutionResult(uRLtoFetch, sizeOfDataDownloaded,
                                    uncompressSize, publishDateStr,
                                    self.pluginName)
    else:
        logger.info(
            "Ignoring data file '%s' since its size (%s bytes) is less than"
            " the minimum of %s bytes",
            fullPathName, len(rawData), self.minArticleLengthInChars)
    return (resultVal)