def import_song_statistic_from(self, database_file):
    """Import per-song play statistics from another application database.

    Copies ADDED, LAST_PLAYED and PLAYED from the SONG table of the
    external SQLite database into the current database, matching rows by
    (TITLE, ARTIST, ALBUM). Songs present only in the other database are
    ignored. Errors are logged, never raised to the caller.

    :param database_file: path to the external SQLite database file.
    """
    self.logger.info('Importing database songs statistics')
    try:
        # Local import keeps this optional dependency off module import time.
        from playhouse.sqlite_ext import SqliteExtDatabase
        other_database = SqliteExtDatabase(database_file)
        try:
            cursor = other_database.execute_sql(
                "SELECT ADDED, LAST_PLAYED, PLAYED, TITLE, ARTIST, ALBUM FROM SONG"
            )
            # Single transaction for the whole import: faster, and partial
            # imports are rolled back on failure.
            with database_context.atomic():
                for row in cursor.fetchall():
                    database_context.execute_sql(
                        "UPDATE SONG SET ADDED = ?, LAST_PLAYED = ?, PLAYED = ? "
                        "WHERE TITLE = ? AND ARTIST = ? AND ALBUM = ?",
                        row)
        finally:
            # Fix: close the external database even when the import fails;
            # the original only closed it on the success path.
            other_database.close()
    except Exception:
        # Fix: narrowed from a bare `except:` so KeyboardInterrupt and
        # SystemExit propagate; ordinary errors are still logged as before.
        self.logger.exception(
            'Error while importing database songs statistics')
# NOTE(review): this chunk is incomplete at both ends. `temp`/`date` look like
# peewee field declarations of a CityTemp model whose `class` header is outside
# this view, followed by flat script code; the final `try:` has no visible
# `except` (it continues past this chunk). Reformatted from a collapsed line;
# code tokens are unchanged.
temp = FloatField()  # NOTE(review): `temp` is reused below for the reading value — confirm intended
date = DateTimeField()
db.connect()
db.create_tables([CityTemp], safe=True)
now = dt.datetime.utcnow()
current_hour = now.hour
# City list with City / Country / station_id columns — presumed schema,
# verify against data/cities.csv.
cities = pd.read_csv("data/cities.csv")
# Removes old rows (readings older than 10 days).
CityTemp.delete().where(
    CityTemp.date < dt.datetime.now() - dt.timedelta(days=10)).execute()
# NOTE(review): "optimize table" is MySQL-specific syntax — confirm `db` is a
# MySQL database; this statement would fail on SQLite.
db.execute_sql("optimize table CityTemp;")
for index, city in cities.iterrows():
    # OpenWeatherMap current-weather endpoint; API key comes from the
    # OWMKEY environment variable.
    url = "http://api.openweathermap.org/data/2.5/weather?q=%s,%s&APPID=%s" % (
        city["City"], city["Country"], os.environ["OWMKEY"])
    r = requests.get(url)
    time.sleep(2)  # crude rate limiting between API calls
    json_data = json.loads(r.text)
    try:
        # NOTE(review): Kelvin -> Celsius conversion should subtract 273.15,
        # not 272.15 — readings look off by 1 degree C; confirm and fix.
        temp = json_data["main"]["temp"] - 272.15
        CityTemp.create(city=city["City"], station_id=city["station_id"].split(",")[0].replace(
            "(", ""), date=dt.datetime.now(), temp=temp)
def run_save_command(task_id, current_print, cmd_queue):
    # NOTE(review): reformatted from whitespace-mangled source. The nesting of
    # the statements under `if respcommand:` (the `with agent.condition:` body,
    # the error `break`, and `respcommand.save()`) is reconstructed — confirm
    # against the original file.
    """Run the command-persistence loop for a print job in its own process.

    Receives (key, print, command) payloads from ``cmd_queue`` (a zmq-style
    socket: ``.recv()`` / ``.send()``). For ``"cmd"`` payloads it hands the
    latest print state to a background SaveAgent thread and saves the
    command row; a command with status ``"error"`` breaks out of the loop.
    A ``"close"`` payload stops the loop. Any exception is reported back to
    the parent over ``cmd_queue`` as a ``("state", {...})`` message.

    :param task_id: identifier of this task (unused in the visible body).
    :param current_print: current Print row (unused in the visible body —
        only referenced from commented-out code).
    :param cmd_queue: duplex queue/socket used both to receive payloads and
        to send error state back.
    """
    from ...env import Env
    from ...data.db import Database
    from playhouse.sqlite_ext import SqliteExtDatabase
    import zmq
    Env.setup()
    # Process-local connection; presumably required because SQLite handles
    # must not be shared across processes — confirm.
    conn = SqliteExtDatabase(
        Database.path,
        pragmas=(
            ('cache_size', -1024 * 64),  # 64MB page-cache.
            ('journal_mode', 'wal'),  # Use WAL-mode (you should always use this!).
            ('foreign_keys', 1),
            ('threadlocals', True)))
    conn.connect()
    # res = conn.execute_sql("PRAGMA journal_size_limit = -1;").fetchall()
    # res = conn.execute_sql("PRAGMA wal_autocheckpoint = -1;").fetchall()
    # Truncate the WAL file up front so it starts from a clean state.
    res = conn.execute_sql("PRAGMA wal_checkpoint(TRUNCATE);").fetchall()
    print(f'PROCESS INITIAL WALL CHECKPOINT = {res}', flush=True)
    # res = conn.execute_sql("PRAGMA wal_autocheckpoint;").fetchall()
    # Rebind the peewee models to this process-local connection.
    PrinterCommand._meta.database = conn
    Print._meta.database = conn
    save_queue = OrderedDict()
    agent = SaveAgent(save_queue)
    agent_thread = Thread(target=agent.start, daemon=True)
    agent_thread.start()
    start_time = time.time()
    cnt = 1
    running = True
    while running:
        try:
            # payload = resp_queue.get()
            payload = None
            polltimeout = 0.0001  # NOTE(review): unused in the live code path
            respcommand = None
            # if queuesize >= maxqueuesize:
            #     polltimeout = 10
            # res = cmd_queue.poll(polltimeout)
            # if res:
            payload = cmd_queue.recv()  # blocking receive from the parent
            # payload = cmd_queue.recv_multipart()
            # queuesize -= 1
            if payload:
                (key, prnt, respcommand) = payload
                # print(f"JSONCMD = {jsoncmd}")
                # respcommand = pickle.loads(pcb)
                # respcommand = PrinterCommand(**jsoncmd)
                if key == "cmd":
                    cnt += 1
                    # respcommand = json.loads(resp.decode('utf-8'))
                    # if cnt % 20 == 0:
                    #     print(f"Save command cnt: {cnt} {time.time()}")
                    if respcommand:
                        # print(f"has resp command {respcommand._meta.database.__dict__}")
                        # Publish the latest print state to the SaveAgent
                        # under its condition lock; the agent persists it
                        # on its own thread.
                        with agent.condition:
                            save_queue[prnt.id] = prnt.state
                        if respcommand.status == "error":
                            break
                        # if respcommand["status"] == "finished":
                        # if respcommand.status == "finished":
                        #     current_print.state["pos"] = pos
                        #     current_print.save()
                        # prnt.save()
                        # Print.update(state=prnt.state).where(Print.id == prnt.id).execute()
                        respcommand.save()
                        # cmd_queue.send(('done', respcommand.id))
                elif key == "close":
                    running = False
        except Exception as e:
            print(f"RES READ EXCEPTION {type(e).__name__}, {str(e)}", flush=True)
            # cmd_queue.put(("state", {"status": "error", "reason": str(e)}))
            cmd_queue.send(("state", {"status": "error", "reason": str(e)}))
    # Shut the SaveAgent down and flush the WAL before the process exits.
    agent.running = False
    res = conn.execute_sql("PRAGMA wal_checkpoint(TRUNCATE);").fetchall()
    print(f'WALL CHECKPOINT = {res}')