def __init__(self,*args,**kwargs):
    """Start the server: register it in MySQL, bring up the UDP side, and
    initialise all in-memory state.

    NOTE(review): uses raw_input, so this is Python 2 code.
    Blocks on operator input until a unique server name is accepted.
    """
    Server.__init__(self,*args,**kwargs)
    Log.Print("Starting server...")
    # Bind address handed in by the caller via localaddr=(ip, port).
    self.ip = kwargs["localaddr"][0]
    self.port = kwargs["localaddr"][1]
    # Ask the operator for a server name; retry until MySQL accepts it as unique.
    self.svr_name = raw_input("Server name: ")
    while not MySQL.AddServer(self.svr_name,self.ip):
        Log.Print("Name allready exists")
        self.svr_name = raw_input("New name: ")
    # Prune stale server rows before we start serving.
    MySQL.CheckDeadServers()
    ##### START UDP #####
    self.UDPconnector = ServerUDP(*args,**kwargs)
    self.UDPconnector.SetTarget(self)
    self.UDPconnector.SetPing(config.ping_server)
    # Server identity / capacity defaults.
    self.id = 0
    self.mode = "Quickmatchs server"
    self.max_players = 10
    self.name = "Newtonis's server"
    # Registries for connections, players and rooms; dictOrder presumably
    # tracks insertion order of rooms — TODO confirm against Add5Rooms().
    self.clients = dict()
    self.players = dict()
    self.gameWorlds = dict()
    self.dictOrder = []
    self.play = True
    # Disabled command-console thread, kept for reference.
    #self.commandsThread = threading.Thread(target=self.CommandThreadDef,name="Commands thread")
    #self.commandsThread.start()
    self.Add5Rooms()
    # Timestamp of the last keep-alive row update (see LogicUpdate).
    self.last_time_sql_updated = time.time()
class StockHistory():
    """Fetch index history from cafef and store every record through the MySQL bridge."""

    def __init__(self):
        # Bridge object that owns the DB connection; closed in get_stock_history().
        self.api_bridge = MySQL()
        logging.basicConfig(filename='jobs.log', level=logging.DEBUG)

    def get_stock_history(self):
        """Download the comparison chart data and insert each record per symbol."""
        response = requests.get(
            "http://s.cafef.vn/ajax/bieudokythuat.ashx?symbol=VNINDEX,VN30INDEX,HNX30INDEX,HNXINDEX,VNINDEX&type=compare"
        )
        payload = response.content.decode('utf-8')
        # One bracketed data series per index; entries 1-2 are discarded,
        # same as the original slice deletion.
        series = re.findall(r'.*\[(.*?)\].*', payload)
        del series[1:3]
        # Append an audit line for this run before touching the database.
        with open('stock_history.txt', 'a') as audit_log:
            audit_log.write('\n' + str(datetime.now()) + "***Stock:" + str(series[1]))
        try:
            for position, chunk in enumerate(series):
                ticker = self.api_bridge.get_symbol(position)
                for record in re.findall(r'({.*?})', chunk):
                    self.api_bridge.insert_stock(json.loads(record), ticker)
        finally:
            # Always release the DB connection.
            self.api_bridge.close()
def process(table):
    """Load one table's CSV dump into MySQL.

    Parameters
    ----------
    table : str
        Table name; the CSV is read from {file_destination}/{table}.csv.

    Fix: commit/disconnect were unconditional in the original, so an
    exception in restore_db leaked the connection. disconnect now runs
    in a finally block; commit still only happens on success.
    """
    global dump_date
    global file_destination
    mysql = MySQL(dump_date)
    try:
        mysql.optimize_load()
        print(f"processing table {table}")
        csv_file = f"{file_destination}/{table}.csv"
        mysql.restore_db(csv_file, table)
        print(f"finished processing file {table}")
        mysql.commit()
    finally:
        # Always release the connection, even if the restore fails mid-way.
        mysql.disconnect()
def add_foreign_keys_process():
    """Apply the foreign-key DDL file to the current dump's database.

    Fixes: the original leaked a file handle via open(...).read() and
    skipped disconnect() if execute_file raised.
    """
    dump_date = get_dump_date(MYSQL, ARCHIVES_BASE_FOLDER)
    mysql = MySQL(dump_date)
    try:
        mysql.optimize_load()
        # Context manager closes the DDL file deterministically.
        with open(FOREIGN_KEYS_FILE, 'r') as fk_file:
            foreign_key_sql = fk_file.read()
        mysql.execute_file(foreign_key_sql)
    finally:
        mysql.disconnect()
def create_schema_process():
    """Stage the dump's schema file, execute it, then extend the users table."""
    dump_date = get_dump_date(MYSQL, ARCHIVES_BASE_FOLDER)
    dump_folder = get_dump_folder_endpoint(ARCHIVES_BASE_FOLDER, MYSQL, dump_date)
    schema_source = os.path.join(dump_folder, f'{SCHEMA}.sql')
    # Temporarily stamp the schema with the dated database name, copy it
    # to the working location, then restore the dump file's original text.
    change_content_in_file(GHTORRENT, f"{GHTORRENT}-{dump_date}", schema_source)
    copy_file(schema_source, SCHEMA_FILE)
    change_content_in_file(f"{GHTORRENT}-{dump_date}", GHTORRENT, schema_source)
    db = MySQL()
    db.execute_schema_file(SCHEMA_FILE)
    # Extra columns used by the user-restore jobs.
    db.add_user_name_column()
    db.add_user_email_column()
    db.disconnect()
def LogicUpdate(self):
    """Run one server tick: pump the network layer, advance every game
    world, and refresh this server's row in MySQL at most every 100 s.

    Idiom fix: iterate gameWorlds.values() directly instead of indexing
    the dict by each key.
    """
    self.Pump()
    for world in self.gameWorlds.values():
        world.LogicUpdate()
    # Periodic keep-alive so MySQL.CheckDeadServers doesn't reap us.
    if time.time() - self.last_time_sql_updated > 100:
        MySQL.UpdateServer(self.ip, self.svr_name)
        self.last_time_sql_updated = time.time()
def restore_old_users_data_process():
    """Copy user rows from the previous dump's database into the current one."""
    current = get_dump_date(MYSQL, ARCHIVES_BASE_FOLDER)
    previous = get_previous_dump_date(MYSQL, ARCHIVES_BASE_FOLDER)
    source_db = f"{GHTORRENT}-{previous}"
    # Nothing to migrate when this is the first dump we have seen.
    if current == previous:
        return
    conn = MySQL(f"{current}")
    conn.update_users(source_db)
    conn.commit()
    conn.disconnect()
def update_mysql_process():
    """Refresh user rows in the previous MySQL dump from the temp users DB."""
    mongo_dump_date = get_dump_date(MONGO, ARCHIVES_BASE_FOLDER)
    # Bail out early when the mongo dump date is not usable.
    if not is_dump_date_valid(mongo_dump_date):
        return
    previous = get_previous_dump_date(MYSQL, ARCHIVES_BASE_FOLDER)
    conn = MySQL(previous)
    conn.update_users(f"{GHTORRENT}-{USERS_TEMP}")
    conn.commit()
    conn.disconnect()
class CurrencyHistory():
    """Pull the full USD rate history and persist it via the MySQL bridge."""

    def __init__(self):
        # DB bridge; closed at the end of get_currency_history().
        self.api_bridge = MySQL()

    def get_currency_history(self):
        """Fetch daily USD rates since 2000-01-01, log and insert each one."""
        with open('currency_history.txt', 'a') as audit_log:
            today = str(datetime.now().date())
            response = requests.get(
                "https://api.exchangeratesapi.io/history?start_at=2000-01-01&end_at="
                + today + "&symbols=USD")
            rates = json.loads(response.content.decode('utf-8'))["rates"]
            try:
                for day, rate in rates.items():
                    # Audit line first, then the DB insert for the same day.
                    audit_log.write('\n' + str(datetime.now()) + ":**currency:" +
                                    str(day) + ", " + str(rate))
                    self.api_bridge.insert_currency(day, rate["USD"], symbol="USD")
            finally:
                # Always release the DB connection.
                self.api_bridge.close()
def drop_mysql_schema_process():
    """Drop the temporary users database once the mongo dump date checks out."""
    if is_dump_date_valid(get_dump_date(MONGO, ARCHIVES_BASE_FOLDER)):
        conn = MySQL()
        conn.drop_database(f"{GHTORRENT}-{USERS_TEMP}")
        conn.disconnect()
def create_mysql_schema_process():
    """Create the temporary users schema when the mongo dump date is valid."""
    if not is_dump_date_valid(get_dump_date(MONGO, ARCHIVES_BASE_FOLDER)):
        return
    conn = MySQL()
    conn.execute_schema_file(f'/{USERS_TEMP}-{SCHEMA}.sql')
    conn.disconnect()
def drop_old_database_process():
    """Remove the previous dump's database unless it is also the current one."""
    current = get_dump_date(MYSQL, ARCHIVES_BASE_FOLDER)
    previous = get_previous_dump_date(MYSQL, ARCHIVES_BASE_FOLDER)
    # Same date means there is no "old" database to drop.
    if current == previous:
        return
    conn = MySQL()
    conn.drop_database(f"{GHTORRENT}-{previous}")
    conn.disconnect()
def create_indexes_process():
    """Stage and execute the dump's index DDL against the dated database.

    Fixes: the original leaked a file handle via open(...).read() and
    skipped disconnect() if execute_file raised.
    """
    dump_date = get_dump_date(MYSQL, ARCHIVES_BASE_FOLDER)
    dump_indexes_file = os.path.join(
        get_dump_folder_endpoint(ARCHIVES_BASE_FOLDER, MYSQL, dump_date),
        f'{INDEXES}.sql')
    # Stamp the dump file with the dated DB name, copy it to the working
    # location, then restore the dump file's original content.
    change_content_in_file(GHTORRENT, f"{GHTORRENT}-{dump_date}", dump_indexes_file)
    copy_file(dump_indexes_file, INDEXES_FILE)
    change_content_in_file(f"{GHTORRENT}-{dump_date}", GHTORRENT, dump_indexes_file)
    # Context manager closes the DDL file deterministically.
    with open(INDEXES_FILE, 'r') as idx_file:
        indexes_sql = idx_file.read()
    mysql = MySQL(dump_date)
    try:
        mysql.execute_file(indexes_sql)
    finally:
        mysql.disconnect()
def __init__(self, type):
    """Spin up a headless Chrome scraper and stream realtime quotes into MySQL.

    type == 0 scrapes the EUR/USD rate from tradingview; anything else
    scrapes four Vietnamese stock indexes from banggia2.ssi.com.vn.
    Blocks until the worker thread(s) finish.
    NOTE(review): 'type' shadows the builtin, but renaming it would change
    the caller-visible keyword interface.
    """
    self.database = MySQL()
    # Headless Chrome with notifications disabled (prefs value 2 = block).
    chrome_options = webdriver.ChromeOptions()
    prefs = {"profile.default_content_setting_values.notifications": 2}
    chrome_options.add_experimental_option("prefs", prefs)
    chrome_options.add_argument("--headless")
    chrome_options.add_argument("--no-sandbox")
    chrome_options.add_argument("start-maximized")
    chrome_options.add_argument("disable-infobars")
    chrome_options.add_argument("--disable-extensions")
    self.driver = webdriver.Chrome(executable_path=DRIVER, chrome_options=chrome_options)
    self.wait = WebDriverWait(self.driver, 10)
    logging.basicConfig(filename='realtime.log', level=logging.INFO)
    if type == 0:  # CURRENCY
        # Dedicated MySQL connection per worker thread — presumably the
        # wrapper is not thread-safe; verify before sharing one connection.
        self.database_currency = MySQL()
        self.driver.get("https://vn.tradingview.com/symbols/EURUSD/")
        currency = threading.Thread(
            target=self.get_realtime_data,
            kwargs={
                "table": TABLE_REALTIME_CURRENCY,
                "symbol": "USD",
                "type": type,
                "wait_time": WAIT_CURRENCY,
                "xpath_value": "//*[@id='anchor-page-1']/div/div[3]/div[1]/div/div/div/div[1]/div[1]",
                "xpath_change_1": "//*[@id='anchor-page-1']/div/div[3]/div[1]/div/div/div/div[1]/div[3]/span[1]",
                "xpath_change_2": "//*[@id='anchor-page-1']/div/div[3]/div[1]/div/div/div/div[1]/div[3]/span[2]",
                "xpath_volumn": "//*[@id='anchor-page-1']/div/div[3]/div[3]/div[3]/div[1]",
                "database": self.database_currency
            })
        # Start and immediately join: the constructor blocks on the scraper.
        currency.start()
        currency.join()
    else:  # STOCK
        self.driver.get("http://banggia2.ssi.com.vn/")
        self.database_vnindex = MySQL()
        vnindex_stock = threading.Thread(
            target=self.get_realtime_data,
            kwargs={
                "table": TABLE_REALTIME_STOCK,
                "symbol": "VNIndex",
                "type": type,
                "wait_time": WAIT_STOCK,
                "xpath_value": '//*[@id="tdHoseVnIndex"]',
                "xpath_change_1": '//*[@id="tdHoseChangeIndex"]',
                "xpath_change_2": '',
                "database": self.database_vnindex
            })
        self.database_vn30index = MySQL()
        vn30_stock = threading.Thread(
            target=self.get_realtime_data,
            kwargs={
                "table": TABLE_REALTIME_STOCK,
                "symbol": "VN30INDEX",
                "type": type,
                "wait_time": WAIT_STOCK,
                "xpath_value": '//*[@id="tdHose30VnIndex"]',
                "xpath_change_1": '//*[@id="tdHose30ChangeIndex"]',
                "xpath_change_2": '',
                "database": self.database_vn30index
            })
        self.database_hnxindex = MySQL()
        hnxindex_stock = threading.Thread(
            target=self.get_realtime_data,
            kwargs={
                "table": TABLE_REALTIME_STOCK,
                "symbol": "HNXINDEX",
                "type": type,
                "wait_time": WAIT_STOCK,
                "xpath_value": '//*[@id="tdHnxIndex"]',
                "xpath_change_1": '//*[@id="tdHnxChangeIndex"]',
                "xpath_change_2": '',
                "database": self.database_hnxindex
            })
        self.database_hnx30index = MySQL()
        hnx30_stock = threading.Thread(
            target=self.get_realtime_data,
            kwargs={
                "table": TABLE_REALTIME_STOCK,
                "symbol": "HNX30INDEX",
                "type": type,
                "wait_time": WAIT_STOCK,
                "xpath_value": '//*[@id="tdHnx30Index"]',
                "xpath_change_1": '//*[@id="tdHnx30ChangeIndex"]',
                "xpath_change_2": '',
                "database": self.database_hnx30index
            })
        # Run all four index scrapers in parallel, then wait for them all.
        vnindex_stock.start()
        vn30_stock.start()
        hnxindex_stock.start()
        hnx30_stock.start()
        vnindex_stock.join()
        vn30_stock.join()
        hnxindex_stock.join()
        hnx30_stock.join()
def __init__(self):
    """Open the MySQL bridge and route job logging to jobs.log."""
    self.api_bridge = MySQL()
    # DEBUG level captures everything the job emits.
    logging.basicConfig(filename='jobs.log', level=logging.DEBUG)
def restore_users_schema_process():
    """Bulk-load the mongo users CSV into the temporary users database."""
    mongo_dump_date = get_dump_date(MONGO, ARCHIVES_BASE_FOLDER)
    # Skip the load entirely when the dump date is not usable.
    if not is_dump_date_valid(mongo_dump_date):
        return
    users_csv = os.path.join(ARCHIVES_BASE_FOLDER,
                             f"{MONGO}-{USERS}-{mongo_dump_date}.csv")
    conn = MySQL(USERS_TEMP)
    conn.optimize_load()
    conn.restore_users_schema(users_csv, USERS)
    conn.commit()
    # Index is built after the bulk insert, then the connection is released.
    conn.create_index_users_login()
    conn.disconnect()
from database.mysql import Database as MySQL

# NOTE(review): credentials belong in the environment or a config file,
# not in source control (values are masked placeholders here).
db_config = {
    'host': 'localhost',
    'user': '******',
    'password': '******',
    'db': 'pj'
}


def main():
    """Smoke-test the DB wrapper by looking up a deliberately invalid id (-1)."""
    db = MySQL(db_config)
    print(db.get_jname_by_id(-1))


if __name__ == "__main__":
    # Guard so importing this module no longer opens a DB connection
    # and runs a query as a side effect.
    main()
def __init__(self):
    """Create the MySQL bridge this fetcher writes through."""
    self.api_bridge = MySQL()