def use_randomized_db(use_source_db):
    """Yield a connection to a randomized on-disk copy of the source DB.

    The source database is backed up into RANDOMIZED_DB_PATH, ship parts
    and part values are randomized, and the live connection is yielded.
    The randomized DB is deliberately kept on disk so it can be inspected
    for debugging.

    :param use_source_db: open connection to the source database
    :yield: sqlite3 connection to the randomized database copy
    """
    source_db = use_source_db
    # Remove any stale randomized DB left over from a previous run.
    if os.path.exists(definitions.RANDOMIZED_DB_PATH):
        os.remove(definitions.RANDOMIZED_DB_PATH)
    # We can see randomized db for debug purposes if put to the disk.
    randomized_db = DBHelper(definitions.RANDOMIZED_DB_PATH)
    try:
        source_db.backup(randomized_db.connection)
        randomize_ship_part(randomized_db.connection)
        randomize_parts_value(randomized_db.connection)
        randomized_db.connection.commit()
        yield randomized_db.connection
    finally:
        # Fix: close even when setup or the consuming test raises,
        # otherwise the connection (and file handle) leaked.
        randomized_db.close()
def pytest_generate_tests(metafunc):
    """Parametrize tests that request the randomized-DB fixture.

    Reads every row of the Ships table from the source database and
    produces one ``(ship name, part column, part value)`` parameter set
    per ship/part combination, with a human-readable id per set.

    :raises ValueError: if no parameter values could be loaded
    """
    # Only act on tests that use the specific fixture.
    if definitions.RANDOMIZED_DB_PARAMETRIZATION_NAME not in metafunc.fixturenames:
        return
    source_db = DBHelper(definitions.DB_PATH)
    # Get all ship rows.  TODO: move 'Ships' into a shared constant?
    c = source_db.connection.cursor()
    c.execute('SELECT * FROM Ships')
    ship_names = c.fetchall()
    # Get the Ships table column names; drop the first one, which is the
    # ship-name column itself (the remaining columns are the part names).
    c_read_columns = source_db.connection.cursor()
    c_read_columns.execute('SELECT name FROM PRAGMA_TABLE_INFO("Ships")')
    ships_table_columns = [x for t in c_read_columns.fetchall() for x in t]
    del ships_table_columns[0]
    argvalues = []
    ids = []
    # Fill values for parametrization.  Fix: enumerate replaces the
    # per-iteration list.index() lookup, which was O(n) per part and
    # would return the wrong index for duplicate column names.
    for ship_row in ship_names:
        ship_name = ship_row[0]
        for column_index, part_name in enumerate(ships_table_columns):
            # +1 skips the ship-name column at the start of the row.
            argvalues.append((ship_name, part_name, ship_row[column_index + 1]))
            ids.append(f'{ship_name}, {part_name}')
    source_db.close()
    if not argvalues:
        raise ValueError("Values not loaded")
    return metafunc.parametrize(
        definitions.RANDOMIZED_DB_PARAMETRIZATION_NAME,
        argvalues, ids=ids, scope='session')
# Split list into list of n-size lists def chunks(arr, n): return [arr[i:i + n] for i in range(0, len(arr), n)] if __name__ == "__main__": parser = argparse.ArgumentParser( description="Runs 'booking-scraper' with commands set in DB") parser.add_argument("processes", help="The number of processes", type=int) parser.add_argument("spiders", help="The number of spiders per process", type=int) args = parser.parse_args() commands = [] db = DBHelper() start_configs = db.select_run_configs() for conf in start_configs: c_in = conf.checkin_date c_out = conf.checkout_date d = { "config_id": conf.id, "concurrent_request_amount": conf.concurrent_request_amount, "city": conf.city, "country": conf.country, "checkin_monthday": c_in.day, "checkin_month": c_in.month, "checkin_year": c_in.year, "checkout_monthday": c_out.day, "checkout_month": c_out.month, "checkout_year": c_out.year,
def use_source_db():
    """Yield a connection to the source database, closing it afterwards.

    :yield: sqlite3 connection to the source database at definitions.DB_PATH
    """
    source_db = DBHelper(definitions.DB_PATH)
    try:
        yield source_db.connection
    finally:
        # Fix: close even when the consuming test raises, otherwise the
        # connection leaked on failure.
        source_db.close()
def create_engine(conn, engine):
    """
    Create a new engine into the engines table
    :param conn: open sqlite3 connection
    :param engine: (engine, power, type) tuple matching the INSERT columns
    :return: engine id (rowid of the inserted row)
    """
    query = """ INSERT INTO engines(engine,power,type) VALUES(?,?,?) """
    cur = conn.cursor()
    cur.execute(query, engine)
    return cur.lastrowid


if __name__ == '__main__':
    # Populate the database with randomly generated components.
    conn = DBHelper(definitions.DB_PATH).connection
    # 'with conn' commits the transaction on success, rolls back on error.
    with conn:
        weapons = []
        for i in range(0, 20):
            # Weapon tuple: name followed by five random component stats.
            weapon = (f'Weapon-{i+1}',
                      random_helper.randint_component(),
                      random_helper.randint_component(),
                      random_helper.randint_component(),
                      random_helper.randint_component(),
                      random_helper.randint_component())
            weapons.append(weapon)
            create_weapon(conn, weapon)
        hulls = []
        for i in range(0, 5):
            hull = (f'Hull-{i+1}',
                    random_helper.randint_component(),
                    random_helper.randint_component(),
class SpiderState(object):
    """Scrapy extension that polls the DB for a spider's desired state.

    Every 5 seconds the extension reads the state for this spider's
    config and applies it: 1 pauses the crawler, 2 resumes it, 3 stops
    it; any other value just re-arms the poll timer.
    """

    def __init__(self, crawler):
        self.current_state = None  # last applied state (1/2/3), None initially
        self.db = DBHelper()
        self.crawler = crawler
        self.t = None  # pending threading.Timer; None until the checker starts

    @classmethod
    def from_crawler(cls, crawler):
        """Standard Scrapy extension hook: build and wire up the extension.

        :raises NotConfigured: when SPIDER_STATE_EXT_ENABLED is not set
        """
        if not crawler.settings.getbool('SPIDER_STATE_EXT_ENABLED'):
            raise NotConfigured
        ext = cls(crawler)
        crawler.signals.connect(ext.run_state_checker,
                                signal=signals.engine_started)
        crawler.signals.connect(ext.stop_state_checker,
                                signal=signals.spider_closed)
        return ext

    def run_state_checker(self):
        # Re-arm the poll timer only while the crawler is still running.
        if self.crawler.crawling is not False:
            self.t = Timer(5.0, self.change_spider_state)
            self.t.start()

    def stop_state_checker(self):
        # Fix: self.t is None when the checker never started (e.g. the
        # engine_started signal fired while crawling was False); cancelling
        # unconditionally raised AttributeError on spider_closed.
        if self.t is not None:
            self.t.cancel()

    def change_spider_state(self):
        """Read the desired state from the DB, apply it, and re-arm the timer."""
        spider_conf = self.db.select_spider_state(
            self.crawler.spider.config_id)
        self.crawler.spider.logger.info(
            "\nSpider id: {0}\nSpider state: {1}".format(
                str(self.crawler.spider.config_id), str(spider_conf.state)))
        if spider_conf.state != self.current_state:
            self.current_state = spider_conf.state
            if spider_conf.state == 1:
                self.pause_spider()
                self.run_state_checker()
            elif spider_conf.state == 2:
                self.resume_spider()
                self.run_state_checker()
            elif spider_conf.state == 3:
                # State 3 stops the crawler and does not re-arm the timer.
                self.stop_spider()
            else:
                self.run_state_checker()
        else:
            self.run_state_checker()

    def pause_spider(self):
        self.crawler.engine.pause()
        self.current_state = 1
        print("Spider paused.")

    def resume_spider(self):
        self.crawler.engine.unpause()
        self.current_state = 2
        print("Spider resumed.")

    def stop_spider(self):
        self.current_state = 3
        self.crawler.stop()
def __init__(self, crawler):
    """Initialise extension state for *crawler*.

    Opens a DB helper and starts with no applied state and no poll timer.
    """
    self.crawler = crawler
    self.db = DBHelper()
    # Nothing applied yet and no timer scheduled until the checker starts.
    self.current_state = None
    self.t = None
def show_db_tables(conn):
    """Print the names of all tables in the connected sqlite database.

    :param conn: open sqlite3 connection
    """
    try:
        c = conn.cursor()
        c.execute('SELECT name from sqlite_master where type= "table"')
        print(c.fetchall())
    except Error as e:
        # NOTE(review): Error is presumably sqlite3.Error — confirm import.
        logging.exception(e)


if __name__ == '__main__':
    # Recreate the database file from scratch on every run.
    if os.path.exists(definitions.DB_PATH):
        os.remove(definitions.DB_PATH)
    conn = DBHelper(definitions.DB_PATH).connection
    sql_create_ships_table = """CREATE TABLE Ships (
                                    ship TEXT PRIMARY KEY,
                                    weapon TEXT,
                                    hull TEXT,
                                    engine TEXT
                                );"""
    sql_create_weapons_table = """CREATE TABLE weapons (
                                    weapon TEXT PRIMARY KEY,
                                    "reload speed" INTEGER,
                                    "rotational speed" INTEGER,
                                    diameter INTEGER,
                                    "power volley" INTEGER,
                                    count INTEGER,
                                    FOREIGN KEY (weapon) REFERENCES Ships (weapon)