def test_get_articles_by_name(self):
    """Substring search on article names should return only matching articles."""
    # Given: two articles persisted to the JSON-backed test database.
    articles = [{
        "id": "1",
        "name": ["mlotek", "hammer"],
        "total_quantity": 20,
        "quantity": 5,
        "is_available": True
    }, {
        "id": "2",
        "name": ["wiertarka", "driller"],
        "total_quantity": 10,
        "quantity": 8,
        "is_available": False
    }]
    with open(self.config_file_name, "w") as f:
        json.dump(articles, f)
    config_manager = ConfigManager()
    config_manager.db_path = self.config_file_name
    db = DBConnector(DbFileConnector(config_manager))
    # 'rka' matches only "wiertarka".
    search_string = 'rka'
    expected = [Article('2', ["wiertarka", "driller"], 10, 8, False)]
    # When
    articles = db.get_articles_by_name(search_string)
    # Then
    self.assertListEqual(expected, articles)
def test_add_article_quantity(self):
    """Adding quantity should raise the stored count up to total_quantity."""
    # Given
    articles = [{
        "id": "1",
        "name": ["mlotek", "hammer"],
        "total_quantity": 22,
        "quantity": 2,
        "is_available": True
    }, {
        "id": "2",
        "name": ["wiertarka", "driller"],
        "total_quantity": 22,
        "quantity": 2,
        "is_available": False
    }]
    with open(self.config_file_name, "w") as f:
        json.dump(articles, f)
    config_manager = ConfigManager()
    config_manager.db_path = self.config_file_name
    db = DBConnector(DbFileConnector(config_manager))
    # Adding 20 to article '2' brings it exactly to total_quantity (22).
    search_id = '2'
    expected = Article('2', ["wiertarka", "driller"], 22, 22, True)
    # Adding 10 to article '1' keeps it below total_quantity (12 < 22).
    search_id_2 = '1'
    expected_2 = Article('1', ["mlotek", "hammer"], 22, 12, True)
    # When
    article = db.add_article_quantity(search_id, 20, True)
    article_2 = db.add_article_quantity(search_id_2, 10, True)
    # Then
    self.assertEqual(expected, article)
    self.assertEqual(expected_2, article_2)
def test_get_articles_by_borrowed(self):
    """Only articles with outstanding (borrowed) units should be returned."""
    # Given
    articles = [{
        "id": "1",
        "name": ["mlotek", "hammer"],
        "total_quantity": 2,
        "quantity": 2,
        "is_available": True
    }, {
        "id": "2",
        "name": ["wiertarka", "driller"],
        "total_quantity": 3,
        "quantity": 2,
        "is_available": False
    }]
    with open(self.config_file_name, "w") as f:
        json.dump(articles, f)
    config_manager = ConfigManager()
    config_manager.db_path = self.config_file_name
    db = DBConnector(DbFileConnector(config_manager))
    # Article '2' has quantity < total_quantity, so it counts as borrowed.
    expected = Article("2", ["wiertarka", "driller"], 3, 2, False)
    # When
    article = db.get_articles_by_borrowed()[0]
    # Then
    self.assertEqual(expected, article)
def test_remove_article_by_id(self):
    """Removing the only stored article should leave the database empty."""
    # Given
    articles = [{
        "id": "1",
        "name": ["mlotek", "hammer"],
        "total_quantity": 2,
        "quantity": 1,
        "is_available": False
    }]
    with open(self.config_file_name, "w") as f:
        json.dump(articles, f)
    config_manager = ConfigManager()
    config_manager.db_path = self.config_file_name
    db = DBConnector(DbFileConnector(config_manager))
    article_id = '1'
    expected = []
    # When
    db.remove_article_by_id(article_id)
    # Then
    self.assertListEqual(expected, db.get_all_articles())
def test_get_all_articles(self):
    """get_all_articles should return every stored article, in order."""
    # Given
    articles = [{
        "id": "1",
        "name": ["mlotek", "hammer"],
        "total_quantity": 2,
        "quantity": 3,
        "is_available": True
    }, {
        "id": "2",
        "name": ["wiertarka", "driller"],
        "total_quantity": 4,
        "quantity": 5,
        "is_available": False
    }]
    with open(self.config_file_name, "w") as f:
        json.dump(articles, f)
    config_manager = ConfigManager()
    config_manager.db_path = self.config_file_name
    db_file_connector = DbFileConnector(config_manager)
    db = DBConnector(db_file_connector)
    expected = [
        Article('1', ["mlotek", "hammer"], 2, 3, True),
        Article('2', ["wiertarka", "driller"], 4, 5, False)
    ]
    # When
    articles = db.get_all_articles()
    # Then
    self.assertListEqual(expected, articles)
def test_add_article_3(self):
    """Adding a duplicate article should not create a second copy of it."""
    # Given: start from an empty database file.
    articles = []
    with open(self.config_file_name, "w") as f:
        json.dump(articles, f)
    config_manager = ConfigManager()
    config_manager.db_path = self.config_file_name
    db = DBConnector(DbFileConnector(config_manager))
    article = Article('1', ["mlotek", "hammer"], 1, 2, False)
    article2 = Article('2', ["mlotek2", "hammer2"], 3, 6, False)
    expected = [
        Article('1', ["mlotek", "hammer"], 1, 2, False),
        Article('2', ["mlotek2", "hammer2"], 3, 6, False)
    ]
    # When: the first article is added twice on purpose — only one copy
    # is expected to end up in the database.
    db.add_article(article)
    db.add_article(article)
    db.add_article(article2)
    # Then
    self.assertListEqual(expected, db.get_all_articles())
class SavedConfigs(Screen, Activity):
    """Screen listing saved brew configurations; tapping one loads it."""

    class ConfigPosition(Button):
        """Button representing a single saved config entry."""
        pass

    def __init__(self, *args, **kwargs):
        super(SavedConfigs, self).__init__(*args, **kwargs)
        # ConfigManager used to enumerate and load saved configs.
        self.config_manager = ConfigManager()

    def on_pre_enter(self):
        # Rebuild the list of saved configs every time the screen is shown.
        positions = self.config_manager.get_list()
        self.ids.config_list.clear_widgets()
        for name in positions:
            # Bind the current name as a default argument so every button
            # keeps its own value (the original used a lambda-in-lambda
            # whose inner parameter shadowed `self`). Kivy passes the
            # pressed button as the first callback argument.
            self.ids.config_list.add_widget(
                self.ConfigPosition(
                    text=name,
                    on_release=lambda button, chosen=name:
                        self.choose_config(chosen),
                ))

    def choose_config(self, name):
        # Load the stored data and hand it to the NewBrew screen.
        data = self.config_manager.load(name)
        self.manager.go('NewBrew', data)

    def go_to_menu(self):
        self.manager.go('MainMenu')
def __init__(self, config_path: str, model_kind: str) -> None:
    """Set up training: config, model, optimizer, device and logging.

    Args:
        config_path: path to the training configuration file.
        model_kind: identifier of the model architecture to build.
    """
    self.config_path = config_path
    self.model_kind = model_kind
    self.config_manager = ConfigManager(config_path=config_path,
                                        model_kind=model_kind)
    self.config = self.config_manager.config
    self.losses = []
    self.lr = 0
    # Index 0 is treated as padding and excluded from the loss.
    self.pad_idx = 0
    self.criterion = nn.CrossEntropyLoss(ignore_index=self.pad_idx)
    self.set_device()
    self.config_manager.create_remove_dirs()
    self.text_encoder = self.config_manager.text_encoder
    self.start_symbol_id = self.text_encoder.start_symbol_id
    self.summary_manager = SummaryWriter(
        log_dir=self.config_manager.log_dir)
    self.model = self.config_manager.get_model()
    self.optimizer = self.get_optimizer()
    self.model = self.model.to(self.device)
    # Resume from a checkpoint when "train_resume_model_path" is configured
    # (.get() keeps this optional).
    self.load_model(model_path=self.config.get("train_resume_model_path"))
    self.load_diacritizer()
    self.initialize_model()
    self.print_config()
def test_return_article_command_2(self):
    """Returning more items than total_quantity allows should leave the
    article unchanged (command '14')."""
    # Given
    config_manager = ConfigManager()
    app_info_logger = AppInfoLogger()
    db = DBConnector(DbFileConnector(config_manager))
    logger = LoggerConnector(LoggerFileConnector(config_manager))
    # Paths are assigned after the connectors are built; the shared
    # ConfigManager state makes them pick the new paths up.
    config_manager.db_path = self.database_file_name
    config_manager.logger_path = self.logger_file_name
    articles = [{
        "id": "18",
        "is_available": True,
        "name": ["Paczka", "Package"],
        "quantity": 150,
        "total_quantity": 250
    }]
    with open(self.database_file_name, "w") as f:
        json.dump(articles, f)
    with open(self.logger_file_name, "w") as f:
        json.dump([], f)
    INVOKER = Invoker(db, logger, config_manager, app_info_logger)
    # 150 + 101 > 250, so the expected record equals the initial one.
    expected = [Article('18', ["Paczka", "Package"], 250, 150, True)]
    # When: simulate user input and silence the command's stdout.
    with mock.patch('builtins.input', side_effect=["18", "101", "\n"]):
        with patch('sys.stdout', new=StringIO()):
            INVOKER.execute('14')
    # Then
    self.assertEqual(str(expected[0]),
                     str(db.get_articles_by_name('Paczka')[0]))
def test_display_history_command(self):
    """Command '4' should render an article's log history as an ASCII table."""
    # Given
    config_manager = ConfigManager()
    app_info_logger = AppInfoLogger()
    db = DBConnector(DbFileConnector(config_manager))
    logger = LoggerConnector(LoggerFileConnector(config_manager))
    config_manager.db_path = self.database_file_name
    config_manager.logger_path = self.logger_file_name
    logs = [{
        'id': '1',
        'logs': [{
            "data": "12-11-2020",
            "text": "Returned 1"
        }]
    }]
    with open(self.logger_file_name, "w") as f:
        json.dump(logs, f)
    INVOKER = Invoker(db, logger, config_manager, app_info_logger)
    # Expected table; note "Returned 1" is rendered as "1 RETURNED".
    expected = "+------------+------------+" + "\n" \
        + "| DATE | TEXT |" + "\n" \
        + "+------------+------------+" + "\n" \
        + "| 12-11-2020 | 1 RETURNED |" + "\n" \
        + "+------------+------------+"
    # When: capture stdout so the printed table can be compared.
    with mock.patch('builtins.input', return_value="1"):
        with patch('sys.stdout', new=StringIO()) as result:
            INVOKER.execute('4')
    # Then
    self.assertEqual(expected, result.getvalue())
def test_change_status_command(self):
    """Command '9' should flip an article's availability to False."""
    # Given
    config_manager = ConfigManager()
    app_info_logger = AppInfoLogger()
    db = DBConnector(DbFileConnector(config_manager))
    logger = LoggerConnector(LoggerFileConnector(config_manager))
    config_manager.db_path = self.database_file_name
    config_manager.logger_path = self.logger_file_name
    articles = [{
        "id": "3",
        "is_available": True,
        "name": ["Szalik", "Scarf"],
        "quantity": 1,
        "total_quantity": 1
    }]
    with open(self.database_file_name, "w") as f:
        json.dump(articles, f)
    with open(self.logger_file_name, "w") as f:
        json.dump([], f)
    INVOKER = Invoker(db, logger, config_manager, app_info_logger)
    expected = [Article('3', ["Szalik", "Scarf"], 1, 1, False)]
    # When: feed the article id and menu choices through patched input.
    with mock.patch('builtins.input', side_effect=["3", "1", "1"]):
        with patch('sys.stdout', new=StringIO()):
            INVOKER.execute('9')
    # Then: name lookup appears case-insensitive ('szalik' vs "Szalik") —
    # the assertion relies on that.
    self.assertEqual(str(expected[0]),
                     str(db.get_articles_by_name('szalik')[0]))
def rename():
    """Interactively rename files under a target directory and log results.

    Reads the database and blacklist locations from the configuration,
    runs the Bellerophone renamer on a user-supplied directory, then
    persists the rename log to the processing-history table.
    """
    program_config = ConfigManager()
    db_location = program_config.get_config("database_location")
    blacklist_file_location = program_config.get_config("blacklist_file")
    print(os.path.realpath(__file__))
    # The blacklist path in the config is relative to the install dir.
    blacklist_file_location = os.path.join(ConfigManager.current_dir,
                                           blacklist_file_location)
    target_location = input(
        "Provide target directory location(fully qualified location):")
    bellerophone = Bellerophone(blacklist_file_location)
    print("Renaming is in progress...")
    bellerophone.crawl_and_rename(target_location, True)
    if not bellerophone.log:  # idiomatic empty check (was `== []`)
        print("No objects to rename!")
    else:
        print("Renaming is completed and saving tracks...")
        with DatabaseContext(db_location) as db:
            db.create_processing_history_table()
        with DatabaseContext(db_location) as db:
            for child in bellerophone.log:
                # Each log entry is an 8-field record describing one rename.
                db.insert_into_processing_history_table(
                    child[0], child[1], child[2], child[3],
                    child[4], child[5], child[6], child[7])
    print("Execution completed.")
def i_remove_article(step, number):
    """Lettuce step: remove the article with the given id and refresh
    world.articles with the remaining database contents."""
    config_manager = ConfigManager()
    # The database path comes from the scenario's shared world state.
    config_manager.db_path = world.path_db
    db_file_connector = DbFileConnector(config_manager)
    db = DBConnector(db_file_connector)
    db.remove_article_by_id(number)
    world.articles = db.get_all_articles()
def get_corename(self):
    """Resolve the human-readable RetroArch core name for this launcher.

    Resolution order:
      1. explicit 'core' in launcher_params, else the retroarch property;
      2. the cached (or freshly loaded) 'coreinfo' config, if it has a
         section for the core;
      3. for ScummVM-style games, parse the core's .info file directly.
    Returns '' when no name can be determined.
    """
    if self.launcher_params is not None and self.launcher_params.get(
            'core') is not None:
        core = self.launcher_params['core']
    else:
        core = self.get_retroarch_property('core')
    coreinfo = ConfigManager.get_instance().get_config('coreinfo')
    if coreinfo is None:
        # Not cached yet: load the cores .info config.
        coreinfo = ConfigManager.get_instance().load_config(
            'cores', extension='info', skip_inst_dir=True)
    if coreinfo is not None:
        if core is not None and coreinfo.has_section(core):
            # Values are stored quoted in the .info file; strip the quotes.
            return coreinfo[core]['corename'].replace('"', '')
    if len(self.game_data) > 0:
        # added for configuring ScummVM
        config = self.get_config()
        if config is not None and config.has_section(
                self.game_data['platform']):
            info_file = config['Launcher'][
                'cores location'] + '/' + core + '.info'
            with open(info_file) as f:
                for line in f:
                    line = line.strip()
                    if line.startswith('corename'):
                        # Take the value after the last '=' and strip the
                        # surrounding quotes and whitespace.
                        *_ignore, corename = line.rpartition('=')
                        corename = corename.replace('"', '')
                        return corename.strip()
    return ''
def __init__(self, config_path: str, model_kind: str) -> None:
    """Set up inference/testing: config, model, device, and diacritizer.

    Args:
        config_path: path to the configuration file.
        model_kind: identifier of the model architecture to build.
    """
    self.config_path = config_path
    self.model_kind = model_kind
    self.config_manager = ConfigManager(config_path=config_path,
                                        model_kind=model_kind)
    self.config = self.config_manager.config
    # Index 0 is treated as padding and excluded from the loss.
    self.pad_idx = 0
    self.criterion = nn.CrossEntropyLoss(ignore_index=self.pad_idx)
    self.set_device()
    self.text_encoder = self.config_manager.text_encoder
    self.start_symbol_id = self.text_encoder.start_symbol_id
    self.model = self.config_manager.get_model()
    self.model = self.model.to(self.device)
    # Evaluation-only: load weights without optimizer state.
    self.load_model(model_path=self.config["test_model_path"],
                    load_optimizer=False)
    self.load_diacritizer()
    self.diacritizer.set_model(self.model)
    self.initialize_model()
    self.print_config()
def test_get_logs_by_id(self):
    """get_logs_by_id should return only the log entries stored under
    the requested article id."""
    # Given: logs for two different articles in the logger file.
    logs = [{
        'id': '1',
        'logs': [{
            "data": "08-05-2020",
            "text": "Added"
        }]
    }, {
        'id': '2',
        'logs': [{
            "data": "08-05-2020",
            "text": "Added"
        }, {
            "data": "07-05-2020",
            "text": "Deleted"
        }]
    }]
    with open(self.config_file_name, "w") as f:
        json.dump(logs, f)
    config_manager = ConfigManager()
    config_manager.logger_path = self.config_file_name
    logger = LoggerConnector(LoggerFileConnector(config_manager))
    article_id = '1'
    expected = ArticleLogs('1', [Log("08-05-2020", "Added")])
    # When
    logs = logger.get_logs_by_id(article_id)
    # Then
    self.assertEqual(expected, logs)
def i_get_article_by_id(step, id):
    """Lettuce step: look up one article by id and expose it via world.

    Stores a single-element list in world.articles when the article is
    found, otherwise an empty list.
    """
    config_manager = ConfigManager()
    config_manager.db_path = world.path_db
    db_file_connector = DbFileConnector(config_manager)
    db = DBConnector(db_file_connector)
    # Reuse the fetched article instead of querying the database twice
    # (the original called get_article_by_id a second time in the list).
    article = db.get_article_by_id(id)
    world.articles = [article] if article else []
def find_replace():
    """Find and Replace.

    Prompts for a target directory plus FIND/REPLACE strings, runs the
    Bellerophone find-and-replace pass, and records the results in the
    processing-history table. Nothing is renamed when either string is
    empty.
    """
    program_config = ConfigManager()
    db_location = program_config.get_config("database_location")
    target_location = input(
        "Provide target directory location(fully qualified location):")
    # An empty blacklist path: no files are excluded for this operation.
    bellerophone = Bellerophone("")
    old_string = input("FIND:")
    if old_string:
        new_string = input("REPLACE:")
        if new_string:
            bellerophone.crawl_and_find_and_replace(target_location, True,
                                                    old_string, new_string)
            if bellerophone.log == []:
                print("No objects to rename!")
            else:
                print("Renaming is completed and saving tracks...")
                with DatabaseContext(db_location) as db:
                    db.create_processing_history_table()
                with DatabaseContext(db_location) as db:
                    for child in bellerophone.log:
                        # 8-field record describing one rename operation.
                        db.insert_into_processing_history_table(
                            child[0], child[1], child[2], child[3],
                            child[4], child[5], child[6], child[7])
    print("Execution completed.")
def read_config(self):
    """Request the config package over the serial injector and apply it.

    Sends the 'config_package' command, extracts the JSON payload framed
    by '!@#$' markers from the reply, and stores each key under
    'game|global|<key>' via ConfigManager. On failure the injector is
    closed and dropped.
    """
    try:
        self.injector.write("config_package\n\r".encode("ascii"))
        # Give the device time to assemble and send the reply.
        self.sleep()
        self.sleep()
        config_string = self.injector.read(size=1024).decode('utf-8')
    except Exception as e:
        self.logerror("No config found, %s", e)
        self.injector.close()
        self.injector = None
        return
    try:
        # Payload is framed as !@#$<json>!@#$ inside the raw reply.
        # Raw string with only '$' escaped — the original pattern used
        # invalid escape sequences ('\!', '\@', '\#') that raise
        # SyntaxWarning on modern Python; the match is identical.
        m = re.search(r'!@#\$(.+?)!@#\$', config_string)
        if m:
            config_string = m.group(1)
            self.loginfo('String found: %s', config_string)
        config = json.loads(config_string)
    except Exception as e:
        self.loginfo("String found: %s", str(config_string))
        self.logerror("Could not load settings package: %s", e)
        self.injector.close()
        self.injector = None
        sleep(3)
        return
    if config:
        for k, v in config.items():
            ConfigManager.set_value('game|global|%s' % k, v)
async def main(self):
    """Connect to the configured websocket server and start the run loop."""
    # Log a masked URI ("xxxxx" in place of the auth token).
    print(f'Connecting to websocket server: {ConfigManager.get("websocket_uri").format("xxxxx")}')
    self.server = ServerAdapterFactory.get_adapter(
        ConfigManager.get('server_adapter'),
        ConfigManager.get('websocket_uri').format(ConfigManager.get('auth_token'))
    )
    await self.saved_data.init_db()
    # Run the application callback within the server's connection context.
    await self.server.exec_with_context(self.run)
def test_fron_stream(self):
    """A ConfigManager built from an open stream should expose the parsed
    config as a dict and support both get() and item access."""
    # Context manager ensures the stream is closed even when an assertion
    # fails — the original leaked the handle on failure.
    with open("../config/test_config/chat.yaml", "r") as stream:
        cfg = ConfigManager(stream)
        self.assertIsNotNone(cfg.config)
        self.assertIsInstance(cfg.config, dict)
        self.assertIsNotNone(cfg.get("meta"))
        self.assertEqual(cfg.get("meta"), cfg["meta"])
def __init__(self):
    """Build the ConfigManager and run setup() when its config is valid."""
    self.config_manager = ConfigManager()
    # Guard clause: bail out early when no usable config exists.
    if not self.config_manager.has_valid_config():
        print("No valid config file is found")
        return
    print("Valid config file is detected")
    self.setup()
def do_startup(self):
    """GTK application startup: load user config, wire actions and the
    menu, and prepare (but not start) the background worker threads."""
    Gtk.Application.do_startup(self)
    # Per-user config lives in ~/.basement_monitoring; create on first run.
    user_config_dir = os.path.expanduser(
        "~") + os.path.sep + ".basement_monitoring"
    if not os.path.exists(user_config_dir):
        os.makedirs(user_config_dir)
    self.config_manager = ConfigManager(user_config_dir + os.path.sep +
                                        "user_config.ini")
    self.serial_config = self.config_manager.read_serial_config()
    # Application-level actions.
    action = Gio.SimpleAction.new("quit", None)
    action.connect("activate", self.on_quit)
    self.add_action(action)
    action = Gio.SimpleAction.new("serialConfig", None)
    action.connect("activate", self.on_serial_config)
    self.add_action(action)
    # Connect/disconnect actions are kept as attributes so their enabled
    # state can be toggled as the serial link goes up or down.
    self.connect_action = Gio.SimpleAction.new("serialConnect", None)
    self.connect_action.connect("activate", self.on_serial_connect)
    self.add_action(self.connect_action)
    self.disconnect_action = Gio.SimpleAction.new("serialDisconnect", None)
    self.disconnect_action.connect("activate", self.on_serial_disconnect)
    self.disconnect_action.set_enabled(False)
    self.add_action(self.disconnect_action)
    # View-granularity actions; minutes/hours start disabled, so the
    # seconds view is presumably the initial mode — confirm against the UI.
    self.seconds_action = Gio.SimpleAction.new("viewSeconds", None)
    self.seconds_action.connect("activate", self.on_view_seconds)
    self.add_action(self.seconds_action)
    self.minutes_action = Gio.SimpleAction.new("viewMinutes", None)
    self.minutes_action.connect("activate", self.on_view_minutes)
    self.minutes_action.set_enabled(False)
    self.add_action(self.minutes_action)
    self.hours_action = Gio.SimpleAction.new("viewHours", None)
    self.hours_action.connect("activate", self.on_view_hours)
    self.hours_action.set_enabled(False)
    self.add_action(self.hours_action)
    builder = Gtk.Builder.new_from_string(MENU_XML, -1)
    self.set_menubar(builder.get_object("app-menu"))
    # Daemon threads so they terminate together with the application.
    self.thread_data = threading.Thread(target=self.read_serial)
    self.thread_data.daemon = True
    self.thread_values = threading.Thread(target=self.update_values)
    self.thread_values.daemon = True
    self.thread_graph = threading.Thread(target=self.update_graph)
    self.thread_graph.daemon = True
    self.thread_data_test = threading.Thread(
        target=self.generate_test_data,
        args=[self.environmental_data_history])
    self.thread_data_test.daemon = True
class FTPClient:
    """FTP client for listing and fetching product files that are not yet
    present in the local catalog."""

    def __init__(self):
        self.config = ConfigManager('cfg.ini')
        self.ftp = FTP(self.config.getFtpHostname())
        self.ftp.login(self.config.getFtpUsername(),
                       self.config.getFtpPassword())
        self.catalog = CatalogManager()

    @staticmethod
    def _product_dir(product):
        """Map a product keyword to its FTP root directory.

        The original class duplicated this if/elif chain in both public
        methods; it is factored out here.

        Raises:
            Exception: for an unrecognised product keyword.
        """
        if product == 'daily':
            return DAILY_FTP_ROOT
        if product == '5day':
            return FIVEDAILY_FTP_ROOT
        if product == 'monthly':
            return MONTHLY_FTP_ROOT
        raise Exception('Unknown product')

    def listProductFiles(self, product):
        """Return {date-token: 'year/filename'} for files missing from the
        catalog for the given product."""
        flist = {}
        ylist = []
        pDir = self._product_dir(product)
        self.ftp.cwd(pDir)
        self.ftp.retrlines('NLST', ylist.append)
        # Each top-level entry is a year directory; list its files.
        for yearDir in ylist:
            flist[yearDir] = []
            self.ftp.cwd(pDir + yearDir + '/')
            self.ftp.retrlines('NLST', flist[yearDir].append)
        res = {}
        for y in flist.keys():
            for f in flist[y]:
                if not self.catalog.exists(product, y + '/' + f):
                    # Key files by the date token embedded in the name,
                    # e.g. 'OCx-20200101-fv...'.
                    m = re.search('OCx-([0-9]{6,8})-fv', f)
                    res[m.group(1)] = y + '/' + f
        return res

    def getFile(self, product, srcFile, target):
        """Download srcFile of the given product to target, creating the
        target directory when needed."""
        pDir = self._product_dir(product)
        self.ftp.cwd(pDir)
        if not os.path.exists(os.path.dirname(target)):
            os.makedirs(os.path.dirname(target))
        with open(target, 'wb') as t:
            self.ftp.retrbinary('RETR ' + srcFile, t.write)
def test_singleton(self):
    """Two LoggerConnector instances built the same way should be the
    same (singleton) object."""
    # Given
    config_manager = ConfigManager()
    config_manager.logger_path = self.config_file_name
    # When
    logger = LoggerConnector(LoggerFileConnector(config_manager))
    logger2 = LoggerConnector(LoggerFileConnector(config_manager))
    # Then
    self.assertEqual(logger2, logger)
def i_show_history_of_rentals_of_article(step, number):
    """Lettuce step: collect the borrow history for an article and expose
    it via world.logs, tagging each entry with the article id."""
    config_manager = ConfigManager()
    config_manager.logger_path = world.path_logger
    logger_file_connector = LoggerFileConnector(config_manager)
    logger = LoggerConnector(logger_file_connector)
    # vars() turns each log object into a plain dict for easy comparison.
    logs = [vars(it) for it in logger.get_borrow_history(number)]
    for log in logs:
        log['id'] = number
    world.logs = logs
def test_singleton(self):
    """Two DBConnector instances built the same way should be the same
    (singleton) object."""
    # Given
    config_manager = ConfigManager()
    # Bug fix: this test previously set logger_path (copy-paste from the
    # logger singleton test); the database connector reads db_path.
    config_manager.db_path = self.config_file_name
    # When
    db = DBConnector(DbFileConnector(config_manager))
    db2 = DBConnector(DbFileConnector(config_manager))
    # Then
    self.assertEqual(db2, db)
def __init__(self, machine=None):
    """Configure and bind the ZMQ server socket; terminate on any failure.

    Args:
        machine: optional machine identifier forwarded to configure().
    """
    try:
        self.config_man = ConfigManager()
        self.configure(machine)
        self.host, self.port = self.get_server_configs()
        self.address = zmqc.get_tcp_address(self.host, self.port)
        self.ctx = zmq.Context(io_threads=1)
        self.socket = zmqc.init_server(self.ctx, self.address)
    except Exception as e:
        # NOTE(review): deliberately broad — any setup error shuts the
        # server down; consider narrowing and logging instead of print.
        print(str(e))
        self.terminate()
def i_change_to_not_available_article(step, number):
    """Lettuce step: mark an article unavailable and persist the change by
    replacing the stored record."""
    config_manager = ConfigManager()
    config_manager.db_path = world.path_db
    db_file_connector = DbFileConnector(config_manager)
    db = DBConnector(db_file_connector)
    new_obj = db.change_article_availability(number, False)
    if new_obj:
        # Replace the record: remove the old entry, insert the updated one.
        db.remove_article_by_id(number)
        db.add_article(new_obj)
    world.articles = db.get_all_articles()
def i_borrow_article(step, qty, id):
    """Lettuce step: borrow qty units of an article when capacity allows,
    persisting the updated record."""
    config_manager = ConfigManager()
    config_manager.db_path = world.path_db
    db_file_connector = DbFileConnector(config_manager)
    db = DBConnector(db_file_connector)
    # Only borrow when the new quantity stays within total_quantity.
    if db.get_article_by_id(id).quantity + int(qty) <= db.get_article_by_id(
            id).total_quantity:
        article = db.add_article_quantity(id, int(qty), True)
        # Persist by replacing the stored record with the updated one.
        db.remove_article_by_id(id)
        db.add_article(article)
    world.articles = db.get_all_articles()
def main(arguments):
    """CLI entry point: load settings and run the stat manager.

    Args:
        arguments: command-line arguments — settings file path, optionally
            'truncate_data' to clear all data (not implemented).
    """
    import os.path
    if len(arguments) == 0:
        print('Usage: main.py <settings_file.ini> [truncate_data]')
        print('Specify truncate_data if you wish to clear all the data')
        print()
        print('Example usage: main.py localhost.ini')
        print('Example usage: main.py localhost.ini truncate_data')
        exit(2)
    settings_file = arguments[0]
    if not os.path.isfile(settings_file):
        print('Settings file not found: %s' % settings_file)
        # Bug fix: previously fell through and crashed inside from_file().
        exit(2)
    config_manager = ConfigManager.from_file(settings_file)
    if len(arguments) == 2:
        if arguments[1] == 'truncate_data':
            print('not implemented')
    stat_manager = StatManager(config_manager)
    stat_manager.run()
def insert_db_tables():
    """Mirror the database schema into the configuration store.

    Lists every table, saves the table list, then records each table's
    columns under its model id, and finally refreshes the models config.
    """
    dm = DatabaseManager()
    cm = ConfigManager()
    tables = dm.list_tables()
    cm.save_tables(tables)
    for table in tables:
        # find_table resolves the model id assigned by save_tables above.
        model_id = cm.find_table(table)
        fields = dm.list_columns(table)
        cm.save_columns(model_id, fields)
    cm.get_models_config()
def test_should_get_result_from_source(self):
    """Processing all sources should push the mock record onto the queue."""
    # Bug fix: the manager built here was previously unused — the test
    # silently fell back to the module-level config_manager fixture.
    manager = ConfigManager.from_file('test.ini')
    # Reset the shared mock-source state so records from other tests
    # cannot leak into this one.
    MockSourceImpl.records.clear()
    MockSourceImpl.records.append({'a': 10})
    delta_queue = ClosableQueue()
    sm = SourceManager.from_config_manager(manager, delta_queue)
    sm.process_all_sources()
    # assertEqual replaces the deprecated assertEquals alias.
    self.assertEqual(delta_queue.get().rows, [{'a': 10}])
def create_tables():
    """(Re)create the application's SQLite schema and seed it.

    Drops and recreates the config, models, methods_config and methods
    tables in the database named by the application config, then calls
    insert_db_tables() to populate them.
    """
    config = ConfigManager()
    conn = sqlite3.connect(config.get("application", "dbname"))
    cursor = conn.cursor()
    # Start from a clean slate.
    cursor.execute("DROP TABLE IF EXISTS config")
    cursor.execute("DROP TABLE IF EXISTS methods")
    cursor.execute("DROP TABLE IF EXISTS methods_config")
    cursor.execute("DROP TABLE IF EXISTS models")
    conn.commit()
    # Connection settings for the mirrored database.
    cursor.execute(
        """CREATE TABLE config ( id integer NOT NULL, host character varying NOT NULL, user character varying NOT NULL, password character varying NOT NULL, port integer NOT NULL, database character varying NOT NULL, PRIMARY KEY (id) )"""
    )
    # One row per mirrored model/table.
    cursor.execute(
        """CREATE TABLE models ( id integer NOT NULL, name character varying NOT NULL, class_name character varying, use boolean DEFAULT 0, PRIMARY KEY (id) )"""
    )
    # Per-method configuration (note: the 'async' column name is a Python
    # keyword but is fine as a SQL identifier).
    cursor.execute(
        """CREATE TABLE methods_config ( id integer NOT NULL, model_id integer NOT NULL, method_name character varying NOT NULL, async boolean DEFAULT 0, use boolean DEFAULT 1, UNIQUE(method_name, model_id), PRIMARY KEY (id), FOREIGN KEY(model_id) REFERENCES models(id) )"""
    )
    # Method/field bindings for each model.
    cursor.execute(
        """CREATE TABLE methods ( id integer NOT NULL, model_id integer NOT NULL, method_name character varying NOT NULL, field_name character varying NOT NULL, use boolean DEFAULT 0, PRIMARY KEY (id), UNIQUE(method_name, field_name, model_id), FOREIGN KEY(model_id) REFERENCES models(id) )"""
    )
    conn.commit()
    insert_db_tables()
def test_should_get_sources_from_multiple_servers(self):
    """A config listing two servers should yield two sources."""
    cm = ConfigManager.from_file('multiple.ini')
    source_manager = SourceManager.from_config_manager(cm, ClosableQueue())
    # assertEqual replaces the deprecated assertEquals alias.
    self.assertEqual(len(source_manager.sources), 2)
def test_should_load_sql_server_source(self):
    """Loading the SQL Server config should produce multiple sources."""
    manager = ConfigManager.from_file('sql_server.ini')
    delta_queue = ClosableQueue()
    sm = SourceManager.from_config_manager(manager, delta_queue)
    # assertGreater reports both operands on failure, unlike
    # assertTrue(len(...) > 1) which only prints "False is not true".
    self.assertGreater(len(sm.sources), 1)
import unittest
from config_manager import ConfigManager, SourceType
from query import Query
from source import Source
from source_manager import SourceManager
from mock_source import MockSourceImpl
from stoppable_worker import StoppableWorker, ClosableQueue

# Shared fixtures: a config manager loaded from test.ini and a mock query
# whose get_data returns a canned result.
config_manager = ConfigManager.from_file('test.ini')
query = Query(key_column='a',
              query_name='Mock',
              mapping={},
              non_data_fields=[],
              get_data=lambda: ['Mocked result'])


class SourceManagerTests(unittest.TestCase):
    """Tests for SourceManager construction and source discovery."""

    def test_should_get_sources_from_type(self):
        # Building from the shared config should yield exactly one source
        # for the mock source type.
        delta_queue = ClosableQueue()
        sm = SourceManager.from_config_manager(config_manager, delta_queue)
        mock = SourceType('MockSourceImpl', MockSourceImpl)
        sources = sm.get_source_for_class(mock, config_manager)
        self.assertEquals(len(sources), 1)

    def test_should_add_source(self):
        # NOTE(review): this method appears truncated in the visible
        # chunk — it builds a manager but asserts nothing here.
        delta_queue = ClosableQueue()
        sm = SourceManager.from_config_manager(config_manager, delta_queue)
def main(arguments):
    """Fetch Swedish police RSS events and index the new ones in Elasticsearch.

    Args:
        arguments: command-line args — settings file path, optionally
            'truncate_data' to drop the existing index first.
    """
    import os.path
    if len(arguments) == 0:
        print('Usage: main.py <settings_file.ini> [truncate_data]')
        print('Specify truncate_data if you wish to clear all the data')
        print()
        print('Example usage: main.py localhost.ini')
        print('Example usage: main.py localhost.ini truncate_data')
        exit(2)
    settings_file = arguments[0]
    if not os.path.isfile(settings_file):
        print('Settings file not found: %s' % settings_file)
        # Bug fix: previously fell through and crashed inside from_file().
        exit(2)
    config_manager = ConfigManager.from_file(settings_file)
    es = ElasticsearchAPI.from_config_manager(config_manager)
    if len(arguments) == 2 and arguments[1] == 'truncate_data':
        es.delete_index('police_events')
    result = dumper.get(polisen_events)
    rssEntries = dumper.parse_to_obj(result)
    es.create_index('police_events')
    # Static mapping: dates parsed, free text analyzed, categorical fields raw.
    mapping = {
        "properties": {
            "published": {"type": "date",
                          "format": "date_hour_minute_second"},
            "title": {"type": "string", "index": "analyzed"},
            "link": {"type": "string", "index": "analyzed"},
            "summary": {"type": "string", "index": "analyzed"},
            "location": {"type": "string", "index": "not_analyzed"},
            "reported_date": {"type": "string", "index": "not_analyzed"},
            "report_type": {"type": "string", "index": "not_analyzed"},
            "location_street": {"type": "string", "index": "not_analyzed"},
            "location_commune": {"type": "string", "index": "not_analyzed"},
            "location_region": {"type": "string", "index": "not_analyzed"},
            "html_body": {"type": "string", "index": "analyzed"},
        }
    }
    es.set_mapping(index_name='police_events', doc_type='events',
                   mapping=mapping)
    # Skip entries that are already indexed.
    existing_entries = es.find_ids([r['entry_id'] for r in rssEntries],
                                   index_name='police_events',
                                   doc_type='events')
    new_entries = [e for e in rssEntries
                   if e['entry_id'] not in existing_entries]
    print('Found %i rss items of which %i are new'
          % (len(rssEntries), len(new_entries)))
    # Getting the HTML body only for the new entries to reduce overhead
    # (typo "overhear" fixed).
    for entry in new_entries:
        entry['html_body'] = dumper.get_link_body(entry['link'])
    es.consume_all(new_entries, index_name='police_events',
                   doc_type='events', id_column_name='entry_id')
from config_manager import ConfigManager ############################ ## HTTP POSTS ############################ import httplib from xml.dom import minidom def getText(nodelist): rc = [] for node in nodelist: if node.nodeType == node.TEXT_NODE: rc.append(node.data) return ''.join(rc) http_host = ConfigManager.get("app", "callback_host"); http_site = ConfigManager.get("app", "callback_site"); connection = httplib.HTTPConnection(http_host) connection.request("GET", "/" + http_site) response = connection.getresponse() if response.status == 200: data = response.read() dom = minidom.parseString(data) elements = dom.getElementsByTagName("href") if len(elements) > 0: print getText(elements[0].childNodes) else: print "ErrorCode: %d", response.status connection.close()
Generated by 'django-admin startproject' using Django 1.8.4. For more information on this file, see https://docs.djangoproject.com/en/1.8/topics/settings/ For the full list of settings and their values, see https://docs.djangoproject.com/en/1.8/ref/settings/ """ # Build paths inside the project like this: os.path.join(BASE_DIR, ...) import os import sys import platform from config_manager import ConfigManager DB_CONFIG = ConfigManager().get_config('database') EMAIL_CONFIG = ConfigManager().get_config('email') SYS_OS = platform.system() AUTH_USER_MODEL = 'users.UserProfile' PROJECT_ROOT = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) PROJECT_PARENT = os.path.dirname(PROJECT_ROOT) sys.path.insert(0, PROJECT_ROOT) BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) # Quick-start development settings - unsuitable for production # See https://docs.djangoproject.com/en/1.8/howto/deployment/checklist/ # SECURITY WARNING: keep the secret key used in production secret!
def start():
    """Boot the Discord bot: logging, config, game servers, client login."""
    # Logging
    logging.basicConfig(filename="FA_bot.log", level=logging.DEBUG,
                        format="%(asctime)-15s %(message)s")
    logging.info("FAbot starting up")

    # Configuration file
    logging.info("Reading configuration")
    config = ConfigManager("config.ini")
    client_email = config.get("email")
    client_pass = config.get("password")
    event_manager.announcement_channels = config.get_json(
        "announcement_channels", default=[])
    # TODO: probably event manager should take channels from client instead?
    main_client.channel_whitelist = config.get_json("channel_whitelist",
                                                    default=[])
    main_client.announcement_channels = config.get_json(
        "announcement_channels", default=[])
    main_client.welcome_pm = config.get("welcome_pm")
    main_client.join_announcement = config.get("join_announcement")
    main_client.leave_announcement = config.get("leave_announcement")

    # Game servers monitored by the bot, keyed by game name.
    game_servers.game_servers['arma'] = game_servers.ArmaServer(
        ip=config.get("arma_server_ip"),
        port=int(config.get("arma_server_port"))
    )
    game_servers.game_servers['insurgency'] = game_servers.InsurgencyServer(
        ip=config.get("insurgency_server_ip"),
        port=int(config.get("insurgency_server_port"))
    )

    # Discord client: log in, bail out hard when the login fails.
    logging.info("Logging into Discord")
    main_client.login(client_email, client_pass)
    if not main_client.is_logged_in:
        logging.critical("Logging into Discord failed")
        print('Logging in to Discord failed')
        exit(1)
    logging.info("Entering main message event loop")
    main_client.run()
def test_should_get_multiple_sources_for_one_type(self):
    """Two configs should be registered under the MockSourceImpl name."""
    cm = ConfigManager.from_file('multiple.ini')
    configs = cm.get_config_for_source_name('MockSourceImpl')
    # assertEqual replaces the deprecated assertEquals alias.
    self.assertEqual(len(configs), 2)
import unittest
from config_manager import ConfigManager
from source_manager import SourceManager
from sql_server_source import SQLServerSource, QueryStore
from stoppable_worker import ClosableQueue

# Shared fixtures for every test in this module.
config_manager = ConfigManager.from_file("sql_server.ini")
query_store = QueryStore()


class SQLServerSourceTestCase(unittest.TestCase):
    """Tests for building SQL Server sources from configuration."""

    def test_should_create_sql_server_source(self):
        config = config_manager.get_config("localhost.master")
        impl = SQLServerSource()
        sources = impl.get_sources(config)
        # One source is expected per query in the store.
        self.assertEquals(len(sources), len(query_store.queries))
        f = sources[0]
        self.assertIsNotNone(f.query)
        self.assertIsNotNone(f.source_name)
        self.assertIsNotNone(f.cache)

    def test_should_execute_query(self):
        delta_queue = ClosableQueue()
        sm = SourceManager.from_config_manager(config_manager, delta_queue)
        # NOTE(review): actual query execution is commented out — this
        # currently only smoke-tests SourceManager construction.
        # for source in sm.sources:
        #     data = sm.get_data(source)
        #     self.assertTrue(len(data) > 0)
def test_instantiate(self):
    """StatManager should construct cleanly from a file-based config."""
    config_manager = ConfigManager.from_file('test.ini')
    # Smoke test: constructing with mocked source/elastic deps must not raise.
    statman = StatManager(config_manager, SourceManager, ElasticMock)
    # statman.run()