def process(self):
    """Load the extra-ingredients CSV into its staging table.

    Reads ``watch/<self.filename>``, cleans the 'Extra Price' column,
    title-cases ingredient names and replaces the
    ``extra_ingredienten_ghost`` table, logging how many rows landed.

    Returns:
        Elapsed wall-clock time in milliseconds.
    """
    start_time = round(time.time() * 1000)
    print('Building Extra Ingredients Staging Table')
    filepath = os.getcwd() + '/watch/' + self.filename
    cursor = Database().get_connection().cursor()
    if os.path.isfile(filepath):
        engine = src.dbEbgine().get_db_engine()
        ingredients_frame = pd.read_csv(filepath, sep=';')
        PriceCleanupHelper().clean_prices(ingredients_frame, 'Extra Price',
                                          self.filename)
        # Uniform capitalization of ingredient names.
        ingredients_frame['Ingredient'] = (
            ingredients_frame['Ingredient'].str.title())
        ingredients_frame.to_sql('extra_ingredienten_ghost', con=engine,
                                 if_exists='replace')
        cursor.execute("select count(*) from extra_ingredienten_ghost")
        staging_table_count = cursor.fetchone()[0]
        log_string = ('Extra Ingredients staging table done in '
                      + str(round(time.time() * 1000) - start_time)
                      + ' ms;' + '\n'
                      + 'Inserted ' + str(staging_table_count) + ' out of '
                      + str(len(ingredients_frame.index))
                      + ' rows into staging table. \n')
        print(log_string)
    return round(time.time() * 1000) - start_time
async def start_attack(sid, name, params):
    """Instantiate and run the attack called *name* with *params*.

    Looks the attack class up by ``__name__`` in ``attacks``, fills its
    constructor arguments from *params* (missing keys become ``None``),
    then runs it as an event-loop task while streaming progress over
    socket.io. Emits an error for unknown names or run-time failures.
    """
    print("Starting attack", name, params)
    # NOTE(review): this grows sys.path on every call — consider hoisting
    # the path setup to module import time.
    sys.path.insert(
        0,
        os.path.abspath(
            os.path.dirname(os.path.realpath(__file__)) + '/config'))
    db_config = importlib.import_module('config')
    db = Database(db_config.db_cfg, 'log_kzh')
    args = [db]
    for a in attacks:
        if a.__name__ == name:
            # Skip `self` and the db parameter; fill the remaining
            # constructor parameters from the supplied params mapping.
            for k in inspect.getfullargspec(a.__init__)[0][2:]:
                try:
                    args.append(params[k])
                except KeyError:
                    args.append(None)
            try:
                attack = a(*args)
                attack.prepare()

                async def on_progress(p):
                    await sio.emit("attack status", {
                        "name": name,
                        "status": p
                    })

                attack_task = app.loop.create_task(attack.start(on_progress))
                running_attacks.append((name, attack_task))
                await attack_task
            # Fix: was a bare `except:`, which also swallowed task
            # cancellation and SystemExit; catch Exception so those
            # still propagate.
            except Exception:
                await error(traceback.format_exc())
            return
    await error("Unknown attack type: {}".format(name))
def __init__(self, pretix_cache: PretixCache):
    """Create the bot with member/message/reaction intents and wire up
    the pretix cache, database and i18n helpers."""
    intents = discord.Intents(members=True, messages=True, reactions=True)
    super().__init__(intents=intents)
    self._pretix_cache = pretix_cache
    self._database = Database()
    self._i18n = I18n(self._database)
def launch():
    """Bootstrap the controller: database, POX listeners, Overlord
    components and the web-event thread."""
    # Connect to db
    tmp_db = Database()
    core.register("db", tmp_db)
    # Set all hosts to inactive
    for h in core.db.find_hosts({}):
        h['active'] = False
        core.db.update_host(h)
    # POX Lib
    core.openflow.addListenerByName("ConnectionUp", _handleConnectionUp)
    core.openflow.addListenerByName("ConnectionDown", _handleConnectionDown)
    core.openflow.addListenerByName("PacketIn", _handlePacketIn)
    core.openflow.addListenerByName("PortStatus", _handlePortStatus)
    # Overlord Lib
    core.devices = Devices()
    core.hosts = Hosts()
    core.links = Links()
    core.forwarding = Forwarding()
    core.forwarding.add_listener("new_flows", _handleNewFlows)
    core.hosts.add_listener("host_moved", _handleHostMoved)
    # Overlord Events
    web_events = WebMessage()
    web_events.addListenerByName("WebCommand", _handleWebCommand)
    # Fix: Thread.setDaemon() is deprecated; pass daemon=True instead.
    t = Thread(target=web_events.run, daemon=True)
    t.start()
def process(self):
    """Load the other-products spreadsheet into its staging table.

    Normalises the Dutch yes/no columns ('spicy', 'vegetarisch') to
    booleans and replaces the ``overige_producten_ghost`` table.

    Returns:
        Elapsed wall-clock time in milliseconds.
    """
    print('Building Other Products Staging Table')
    filepath = os.getcwd() + '/watch/' + self.filename
    cursor = Database().get_connection().cursor()
    start_time = round(time.time() * 1000)
    if os.path.isfile(filepath):
        engine = src.dbEbgine().get_db_engine()
        products_frame = pd.read_excel(filepath)
        # Each column goes 'Ja'/'Nee' -> '1'/'0' -> numeric -> bool.
        for column in ('spicy', 'vegetarisch'):
            products_frame[column] = products_frame[column].str.replace('Ja', '1')
            products_frame[column] = products_frame[column].str.replace('Nee', '0')
            products_frame[column] = pd.to_numeric(products_frame[column])
            products_frame[column] = products_frame[column].astype('bool')
        products_frame.to_sql('overige_producten_ghost', con=engine,
                              if_exists='replace')
        cursor.execute("select count(*) from overige_producten_ghost")
        staging_table_count = cursor.fetchone()[0]
        log_string = ('Other products staging table done in '
                      + str(round(time.time() * 1000) - start_time)
                      + ' ms;' + '\n'
                      + 'Inserted ' + str(staging_table_count) + ' out of '
                      + str(len(products_frame.index))
                      + ' rows into staging table. \n')
        print(log_string)
    return round(time.time() * 1000) - start_time
def node_rpc_handler():
    """Handle a JSON-RPC request against the node.

    Validates the HTTP request and its JSON body, dispatches the
    ``transactions_in_mempool`` method, and returns a JSON-RPC success
    or error response as a JSON string.
    """
    # Fix: define _id before the try block — previously an exception
    # raised before `_id = req.get(...)` made the except handler itself
    # crash with NameError.
    _id = None
    try:
        evaluation_1 = rpc.validate_request(request, app.config)
        if evaluation_1 is True:
            req = request.get_json()
            evaluation_2 = rpc.validate_obj(req, app.config)
            if evaluation_2 is True:
                method_name = req["method"]
                params = req.get("params", {})  # parsed but currently unused
                _id = req.get("id", None)
                database = Database(app.config["database_path"])
                if method_name == "transactions_in_mempool":
                    database.chain_full_url = app.config["chain_url"] + ":" + app.config["chain_port"] + "/"
                    # Fix: `is not ""` compared identity, not equality
                    # (a SyntaxWarning in modern Python); use != "".
                    if app.config["chain_api_key"] is not None and app.config["chain_api_key"] != "":
                        database.chain_api_key = app.config["chain_api_key"]
                    return json.dumps(rpc.make_success_resp(hc.transactions_in_mempool(database), req["id"]))
                else:
                    ###Method not found
                    return json.dumps(make_error_resp(const.NO_METHOD_CODE, const.NO_METHOD, _id))
            else:
                return json.dumps(evaluation_2)
        else:
            return json.dumps(evaluation_1)
    except Exception:
        ###Internal error
        return json.dumps(rpc.make_error_resp(const.INTERNAL_ERROR_CODE, const.INTERNAL_ERROR, _id))
def process(self):
    """Load the pizza-crusts spreadsheet into its staging table and
    return the elapsed time in milliseconds."""
    print('Building Pizza Crusts Staging Table')
    filepath = os.getcwd() + '/watch/' + self.filename
    cursor = Database().get_connection().cursor()
    start_time = round(time.time() * 1000)
    if os.path.isfile(filepath):
        engine = src.dbEbgine().get_db_engine()
        crusts_frame = pd.read_excel(filepath)
        crusts_frame.to_sql('pizza_bodems_ghost', con=engine,
                            if_exists='replace')
        cursor.execute("select count(*) from pizza_bodems_ghost")
        staging_table_count = cursor.fetchone()[0]
        log_string = ('Pizza crusts staging table done in '
                      + str(round(time.time() * 1000) - start_time)
                      + ' ms;' + '\n'
                      + 'Inserted ' + str(staging_table_count) + ' out of '
                      + str(len(crusts_frame.index))
                      + ' rows into staging table. \n')
        print(log_string)
    return round(time.time() * 1000) - start_time
def test_get_known_words(self):
    """Known words from every stored book come back as one collection."""
    compare_words = ContainerComparator(
        elem_equality_comparator=lambda lhs, rhs: lhs == rhs,
        sort_key=lambda w: w)
    database = Database(self.db_handle)
    # No books stored yet -> nothing is known.
    self.assertEqual(len(database.get_known_words()), 0)
    book1 = Book.from_path(f"{base_dir}/test/data/book.epub")
    for word in book1.words:
        word.mark_if_known(True)
    book2 = Book.from_path(f"{base_dir}/test/data/book2.epub")
    for word in book2.words:
        word.mark_if_known(True)
    database.store_book(book1)
    database.store_book(book2)
    both_books_words = ({w.stored_word for w in book1.known_words}
                        | {w.stored_word for w in book2.known_words})
    self.assertTrue(
        compare_words(database.get_known_words(), both_books_words))
def main():
    """Poll product prices forever, record them and mail out discounts.

    Each cycle (roughly hourly) stores every product's price in the
    database; any seller priced more than ``discount_amount`` percent
    below the product's average price triggers a notification mail.
    """
    sc = Scraper()
    mail = Mail()
    discount_amount = 20
    while True:
        product_list = sc.scrape_product_links(products)
        # iterate over product
        for product in product_list:
            # Fix: average_price()/prices()/links() were recomputed on
            # every seller iteration; compute them once per product
            # (assumes these accessors are pure — TODO confirm).
            average_price = product.average_price()
            prices = product.prices()
            links = product.links()
            print("Average price of", product.name + ":", average_price)
            # initialize db, if initialized before it will connect to db
            # insert price of the product to database
            with Database('products.db') as db:
                db.insert_price(product)
            # if price of seller is lower than %discount_amount of the
            # product send e-mail
            discounted_price = (average_price
                                - (average_price * discount_amount / 100))
            # iterate over sellers, comparing each against the average
            for seller in prices:
                if prices[seller] < discounted_price:
                    print(seller, "has a discount")
                    discount_link = links[seller]
                    mail.send_mail(
                        mail.create_mail_body(product.name, discount_link,
                                              links))
            print("**************************************")
        # wait 1 hour randomly
        sleep(randint(3500, 3600))
def test_sqlitedb_get_entities():
    """Test if SqliteDB can query for entities successfully"""
    # Query entities
    database = Database(SqliteDB(db_file))
    queried_entities = database.get_entities()
    assert len(queried_entities) == 2, "number of entities queried is not equal to 2"
def main(args):
    """Create, extend or list an image database per the parsed command."""
    args = parser.parse_args(args)
    if args.command == 'create':
        db = Database(os.path.basename(args.database))
    elif args.command in ['add', 'list']:
        db = Database.load(args.database)
    if args.command == 'list':
        print(f'Database {db.name} contains {len(db)} images')
        for path, _data in db.iter_images():
            print(path)
        return
    # From here on we are adding images, which requires an input.
    if not args.input:
        print('Input file/folder required')
        return
    if args.root_dir:
        args.root_dir = os.path.abspath(args.root_dir)
    args.input = os.path.abspath(args.input)
    if os.path.isdir(args.input):
        add_from_folder(db, args.input, args.root_dir)
    elif os.path.splitext(args.input)[1] == '.csv':
        add_from_csv(db, args.input, args.root_dir)
    else:
        print(f'Could not interpret input {args.input}')
    db.save(args.database)
def test_storing_and_restoring_book(self):
    """A book survives a store/restore round trip with all word state."""
    database = Database(self.db_handle)
    book = Book.from_path(f"{base_dir}/test/data/book.epub")
    # small data manipulation to make sure that it was retained when
    # saving to db
    for word in book.words:
        word.mark_if_known(True)
    database.store_book(book)
    restored_book = database.restore_book(book.name)
    compare_words = ContainerComparator(
        elem_equality_comparator=self._are_words_equal,
        sort_key=lambda w: w.stored_word)
    self.assertEqual(book.are_all_words_processed(),
                     restored_book.are_all_words_processed())
    for original_words, restored_words in (
            (book.known_words, restored_book.known_words),
            (book.unknown_words, restored_book.unknown_words)):
        self.assertTrue(compare_words(original_words, restored_words))
    self.assertEqual(book.name, restored_book.name)
    self.assertTrue(compare_words(book.words, restored_book.words))
    self.assertEqual(book.flashcards, restored_book.flashcards)
def create_app(app_config: "AppConfig | None" = None):
    """Build and configure the Flask application.

    Args:
        app_config: configuration object. When omitted, the default
            config file is loaded at call time. (Fix: the previous
            ``AppConfig(DEF_CONFIG_FILENAME)`` default was evaluated
            once at import time and shared between calls.)

    Raises:
        WrongEnvironment: if ``app_config.ENV`` is neither
            'development' nor 'production'.
    """
    if app_config is None:
        app_config = AppConfig(DEF_CONFIG_FILENAME)
    app = Flask(__name__)
    if app_config.ENV == 'development':
        # refreshing application
        # Fix: update app.config in place instead of replacing it with a
        # plain dict — Flask's Config subclass carries methods
        # (from_object, get_namespace, ...) that a plain dict lacks.
        app.config.update(SEND_FILE_MAX_AGE_DEFAULT=0,
                          TEMPLATES_AUTO_RELOAD=True)
    elif app_config.ENV == 'production':
        pass
    else:
        raise WrongEnvironment(app_config.ENV)
    app.config.update(app_config.__dict__)
    database = Database('data/users.pickle', 'data/analyses.pickle',
                        app_config.UPLOAD_FOLDER)

    @app.context_processor
    def inject_globals():
        return {
            'home': f'http://{app_config.HOST}:{app_config.PORT}',
        }

    add_api(app, database)
    return app
def __init__(self, parent=None):
    """Build the restaurant main window: load data into the tables,
    disable editing controls, and wire every button to its handler."""
    super(Restaurant, self).__init__(parent)
    self.setupUi(self)
    self.database = Database(FILENAME)
    self.customer_editable = False
    self.order_editable = False
    self.menu_editable = False
    # Add/delete buttons start disabled until editing is enabled.
    for button in (self.addCustomerBtn, self.deleteCustomerBtn,
                   self.addOrderBtn, self.deleteOrderBtn,
                   self.addItemBtn, self.deleteItemBtn):
        button.setEnabled(False)
    self.fill_customer_table()
    self.fill_order_table()
    self.fill_menu_table()
    self.fill_statistics_table()
    # Table-driven signal wiring keeps every button/handler pair visible.
    for button, handler in (
            (self.addCustomerBtn, self.add_customer),
            (self.deleteCustomerBtn, self.delete_customer),
            (self.addOrderBtn, self.add_order),
            (self.deleteOrderBtn, self.delete_order),
            (self.addItemBtn, self.add_menu_item),
            (self.deleteItemBtn, self.delete_menu_item),
            (self.customerEditButton, self.edit_customer_info),
            (self.orderEditButton, self.edit_order_info),
            (self.menuEditButton, self.edit_menu_info)):
        self.connect(button, SIGNAL('clicked()'), handler)
def migrate_product_data(self):
    """Run the product import stored procedures and report any errors.

    Executes the Import* procedures to move staging data into the
    target tables, then reads ``product_import_error_log`` and logs any
    rows found there.

    Returns:
        Elapsed wall-clock time in milliseconds.
    """
    db = Database()
    start_time = round(time.time() * 1000)
    print('Starting products staging table data migration to target')
    db.execute("SET NOCOUNT ON exec ImportCategoryData")
    db.execute("SET NOCOUNT ON exec ImportIngredientData")
    db.execute("SET NOCOUNT ON exec ImportSauceData")
    db.execute("SET NOCOUNT ON exec ImportCrustData")
    db.execute("SET NOCOUNT ON EXEC ImportPizzaData")
    db.execute("SET NOCOUNT ON exec ImportOtherProductData")
    print('products data migration to target done\n')
    engine = src.dbEbgine().get_db_engine()
    try:
        error_dataframe = pd.read_sql(
            "SELECT * FROM product_import_error_log", engine)
        if len(error_dataframe) > 0:
            # Fix: the elapsed value is in milliseconds but was labelled
            # "seconds" in both messages.
            print('Product migration complete with ' +
                  str(len(error_dataframe)) + ' errors in ' +
                  str(round(time.time() * 1000) - start_time) +
                  ' ms. See error logs for details.\n')
            error_string = "Product migration errors found: \n" \
                + self.__indent(error_dataframe.to_string(), 30)
            Logger().error(error_string)
        else:
            # Fix: also adds the missing space before the unit.
            print('Product migration complete with no errors in ' +
                  str(round(time.time() * 1000) - start_time) + ' ms.\n')
    except Exception:
        # NOTE(review): any failure reading the error table lands here
        # (connection loss included), not only a missing table — this
        # message may mislead; consider narrowing the exception.
        print("No import errors found")
    return round(time.time() * 1000) - start_time
def main():
    """Command-line entry point: create or extract a database archive.

    ``create <folder> <output>`` builds a database from a folder;
    ``extract <dbfile> <outdir> [conditions...]`` exports matching files.
    """
    assert len(sys.argv) >= 2, "Missing command"
    command = sys.argv[1]
    if command == "create":
        assert len(
            sys.argv) == 4, "Invalid number of arguments for calling create"
        assert os.path.isdir(sys.argv[2]), "Provided path is not a folder"
        db = Database(sys.argv[2])
        db.export(sys.argv[3])
    if command == "extract":
        # Fix: extract reads sys.argv[3] below, so at least four
        # arguments are required (the old ">= 3" check let an
        # IndexError escape instead of the assertion message).
        assert len(
            sys.argv) >= 4, "Invalid number of arguments for calling extract"
        assert os.path.isfile(sys.argv[2]), "Invalid database file"
        if os.path.isdir(sys.argv[3]):
            result = query_yes_no(
                "Output path already exists, do you want to use it?",
                default='no')
            if not result:
                sys.exit(0)
        database_path = sys.argv[2]
        export_path = sys.argv[3]
        conditions = sys.argv[4:]
        # Fix: close the database file after unpickling (the old
        # open() handle was never closed).
        with open(database_path, "rb") as db_file:
            db = pickle.load(db_file)
        assert isinstance(db, Database), "File is not a valid database"
        print("Check conditions...")
        condition = ConditionManager(conditions)
        db.export_files(condition_manager=condition, output_path=export_path)
def run(self, interface, **kwargs):
    """Interactively process every word of a chosen book.

    The book is stored to the database in a ``finally`` block so
    progress is persisted even if the session is interrupted.
    """
    book = ChoseBook().run(interface, **kwargs)
    # TODO add excluding known words
    it = Iterator(book.words)
    try:
        while not book.are_all_words_processed():
            idx, word = it.get()
            prompt = self._get_enter_word_prompt(word, idx, len(it))
            feature_str = interface.get_input(
                prompt,
                input_processor=self.event_handler.input_processor)
            self.event_handler.process(interface, feature_str, it=it,
                                       idx=idx, word=word, size=len(it),
                                       book=book)
        interface.display_info(
            "All words in the book has been processed. ")
    finally:
        Database().store_book(book)
def photo():
    """Ask the device to take a photo, wait for it, then serve the
    latest image file."""
    print("photo requested")
    database = Database()
    database.add_telecommand("TAKE PHOTO")
    # Give the telecommand time to produce the photo.
    sleep(3)
    latest_name = database.get_last_photo_name()
    return send_file(f"static/{latest_name}", mimetype='image/gif')
def main():
    """Initialise the database, start the background updater process and
    serve the API until it exits."""
    chain_full_url = (app.config["chain_url"] + ":"
                      + app.config["chain_port"] + "/")
    database = Database(app.config["database_path"], chain_full_url,
                        app.config["chain_api_key"])
    database.init()
    # Update database at queryInterval and serve flask app
    updater = Process(target=update, args=(database,))
    updater.start()
    serve(app, host='0.0.0.0', port=app.config["app_api_port"])
    updater.join()
def __init__(self, **kwargs):
    """Initialise the outbox layout with empty mail state and start the
    periodic mail check."""
    super(OutboxLayout, self).__init__(**kwargs)
    self.counter = 0
    self.mails = []
    self.nRead = 0
    self.db = Database()
    # Check immediately on startup, then poll once a minute.
    Clock.schedule_once(self.scheduled_mail_check, 0)
    Clock.schedule_interval(self.scheduled_mail_check, 60)
def test_sqlitedb_get_sentences():
    """Test if SqliteDB can query for sentences with entities successfully"""
    # Query sentence with entity specified
    database = Database(SqliteDB(db_file))
    matching_sentences = database.get_sentences("google")
    assert len(matching_sentences) == 1, "number of sentence with 'google' entity is not 1"
def __init__(self):
    """Create the top-level application context.

    Instantiates the core services (config, core, logs, database),
    prepares an empty data-asset store and flags the instance as
    running. Construction order is preserved as-is in case the
    service constructors have side effects that depend on it.
    """
    self.config = Config()
    self.core = Core()
    self.logs = Logs()
    self.database = Database()
    self.data = {}  # Data assets
    self.running = True
def test_get_last_row_id():
    """get_last_row_id returns the cursor's lastrowid untouched."""
    expected = 123
    cursor.lastrowid = expected
    with patch.object(sqlite3, 'connect', return_value=connection):
        database = Database(Mock())
        assert database.get_last_row_id() is expected
def setup():
    """ Restore database to default before each test cases """
    db = Database(MongoClient()['quiz_database'])
    db.restore_default_db()
    yield 'setup'
    # Restore again so the next test also starts from the defaults.
    db.restore_default_db()
def test_fetchone():
    """Database.fetchone forwards to the cursor exactly once and returns
    its row."""
    expected = (1, )
    cursor.fetchone.return_value = expected
    with patch.object(sqlite3, 'connect', return_value=connection):
        database = Database(Mock())
        row = database.fetchone()
        cursor.fetchone.assert_called_once()
        assert row is expected
def setUpClass(self):
    """Create the shared in-memory database and fixture users for the
    test methods.

    NOTE(review): unittest invokes setUpClass on the class object,
    which normally requires @classmethod — confirm a decorator exists
    above this view, or that a custom runner calls this per-instance.
    """
    self.db = Database(UseMemory)
    # Credentials are masked placeholder values in this fixture.
    self.user = {
        "username": "******",
        "password": "******",
        "account_type": "voter",
    }
    self.non_existent_user = "******"
def __init__(self, parent=None):
    """Build the existing-customer dialog and wire its navigation
    buttons."""
    super(ExistingCustomer, self).__init__(parent)
    self.setupUi(self)
    self.database = Database(FILENAME)
    for button, handler in ((self.confirmButton, self.show_order_menu),
                            (self.previousButton, self.show_welcome)):
        self.connect(button, SIGNAL('clicked()'), handler)
def run(self, interface, **kwargs):
    """Prompt for an ebook path, load the book, persist it and return it."""
    path = interface.get_input("Enter path to ebook",
                               input_processor=FilePathProcessor())
    book = Book.from_path(path)
    Database().store_book(book)
    return book
def setUp(self):
    """Open a shared in-memory SQLite database and ensure the user table
    exists before each test."""
    uri = "file:testdb?mode=memory&cache=shared"
    self.conn = sqlite3.connect(uri)
    self.db = Database(uri)
    self.conn.execute(
        "CREATE TABLE IF NOT EXISTS user "
        "(username TEXT NOT NULL UNIQUE, embedding REAL)"
    )
def make_customer(self):
    """Collect the form fields into a Customer and pair it with a
    database handle.

    Returns:
        (customer, database) tuple built from the current input values.
    """
    form_values = (self.firstNameInput.text(),
                   self.lastNameInput.text(),
                   self.postcodeInput.text(),
                   self.phoneInput.text(),
                   self.emailInput.text())
    customer = Customer(*form_values)
    return customer, Database(FILENAME)