def test_load_from_file_into_memory(self):
    """Copy a file-backed database into :memory: and compare contents."""
    file_db = sqlite3.connect(self.db_filename)
    self.open_connections.append(file_db)
    cursor = file_db.cursor()
    cursor.execute('CREATE TABLE baz(foo VARCHAR(20))')
    cursor.executemany('INSERT INTO baz VALUES (?)',
                       [("abc",), ("x-x-x",), ("123",)])
    file_db.commit()
    cursor.execute('SELECT * FROM baz ORDER BY foo DESC')
    expected = cursor.fetchall()
    cursor.close()
    file_db.rollback()
    # copy the file database into a fresh in-memory one
    mem_db = sqlite3.connect(':memory:')
    sqlitebck.copy(file_db, mem_db)
    # the source file is no longer needed
    os.unlink(self.db_filename)
    # fetch the same rows from the in-memory copy
    self.open_connections.append(mem_db)
    mem_cursor = mem_db.cursor()
    mem_cursor.execute('SELECT * FROM baz ORDER BY foo DESC')
    copied = mem_cursor.fetchall()
    mem_cursor.close()
    self.assertEqual(expected, copied)
def __exit__(self, exc_class, exc, traceback):
    """On context exit: commit, optionally snapshot the DB to a file, close."""
    conn = self.conn
    conn.commit()
    destination = self.copy_on_exit
    if destination:
        snapshot = sqlite3.connect(destination)
        sqlitebck.copy(conn, snapshot)
        snapshot.close()
    conn.close()
def load_db(self, dbfile=os.path.join(DATA_DIR, 'asx.db')):
    """Replace the in-memory database with the contents of *dbfile*.

    :param dbfile: path of the sqlite file to load (defaults to asx.db
        inside DATA_DIR; evaluated once at definition time).
    :return: True on completion.
    """
    # Fix: messages read "Loading db from from %s" (duplicated word).
    print("Loading db from %s" % dbfile)
    bdb = sqlite3.connect(dbfile)
    try:
        sqlitebck.copy(bdb, self._conn)
    finally:
        # Fix: the source connection leaked if the copy raised.
        bdb.close()
    print("Loaded db from %s" % dbfile)
    return True
def main():
    """Copy data.sqlite into data_backup/data_backup.sqlite."""
    import sqlite3
    import sqlitebck
    source = sqlite3.connect('data.sqlite')
    backup = sqlite3.connect('data_backup/data_backup.sqlite')
    sqlitebck.copy(source, backup)
    source.close()
    backup.close()
def move_to_RAM(self):
    """Swap the sqlite connection for an in-memory copy of the database."""
    if hasattr(self, 'old_conn') or self.db_type != 'sqlite3':
        return  # already moved, or not a sqlite3 backend
    self.connection.commit()
    # keep the on-disk handles so the DB can be written back later
    self.old_conn = self.connection
    self.old_cur = self.cursor
    self.connection = self.sql.connect(':memory:')
    self.cursor = self.connection.cursor()
    sqlitebck.copy(self.old_conn, self.connection)
def dump_db(filename):
    """Back up the application database to *filename*.

    Uses sqlite3.Connection.backup when available (Python >= 3.7) and
    falls back to the sqlitebck extension otherwise.

    :param filename: path of the destination database file.
    """
    # NOTE(review): db_file is not a parameter — presumably a module-level
    # path naming the source database; confirm against the module.
    conn = create_connection(db_file)
    backup_conn = create_connection(filename)
    try:
        try:
            conn.backup(backup_conn)
        except AttributeError:  # Python < 3.7: no Connection.backup()
            import sqlitebck
            sqlitebck.copy(conn, backup_conn)
    finally:
        # Fix: both connections leaked in the original.
        backup_conn.close()
        conn.close()
def move_to_RAM(self):
    """Load the on-disk sqlite database into a :memory: connection."""
    already_moved = hasattr(self, 'old_conn')
    if not already_moved and self.db_type == 'sqlite3':
        self.connection.commit()
        # remember the file-backed connection/cursor for later write-back
        self.old_conn, self.old_cur = self.connection, self.cursor
        ram = self.sql.connect(':memory:')
        self.connection, self.cursor = ram, ram.cursor()
        sqlitebck.copy(self.old_conn, ram)
def register_backup():
    """Snapshot /home/app/dev.db into a timestamped file under /home/backup.

    /home/backup/... and /home/app/... are paths in the Docker container
    file system, not paths on the host.
    """
    # Fix: the format was '%Y%M%d' — %M is the minute; the month
    # directive is lowercase %m.
    backup_name = ("/home/backup/register_"
                   + datetime.now().strftime('%Y%m%d_%H_%M_%S') + ".db")
    conn = sqlite3.connect("/home/app/dev.db")
    backcon = sqlite3.connect(backup_name)
    try:
        sqlitebck.copy(conn, backcon)
    finally:
        # Fix: close both connections even if the copy fails.
        conn.close()
        backcon.close()
    # parenthesized print works identically on Python 2 for a single arg
    print("backup complete! backup: " + backup_name)
def disconnectDB2(self):
    """ Disconnect the database and copy on to the disk """
    # release the cursor first
    self.cur.close()
    # in fast mode the live DB is in memory: persist it to the file copy
    if self.fastmode:
        sqlitebck.copy(self.conn, self.conn2)
        self.conn2.close()
    self.conn.close()
def backup(self, db_file):
    """Copy the live DB connection into a file in the backup directory.

    :param db_file: path of the database being backed up; only its
        basename is reused for the destination file name.
    """
    logging.info('Backing up DB file [{0}] to [{1}]'.format(db_file, self._backup_dir))
    try:
        # os.path.join handles a missing trailing '/' on the backup dir
        # (resolves the old TODO); plain '+' silently glued the names.
        dest = os.path.join(self._backup_dir, os.path.split(db_file)[1])
        db_con_copy = sqlite3.connect(dest)
        try:
            sqlitebck.copy(self._db_con, db_con_copy)
        finally:
            # Fix: the destination connection was never closed.
            db_con_copy.close()
        logging.info('... OK')
    except Exception as ex:
        logging.exception(ex)
def connectDB(self):
    """ Connect to the database """
    # prefer the explicit db_file, fall back to db_path
    target = self.db_file if self.db_file else self.db_path
    self.conn2 = sqlite3.connect(target, check_same_thread=False)
    with self.conn2:
        self.conn2.execute("PRAGMA synchronous = OFF")
    if self.fastmode:
        # work from an in-memory copy seeded from the file database
        self.conn = sqlite3.connect(':memory:')
        sqlitebck.copy(self.conn2, self.conn)
    else:
        self.conn = self.conn2
    self.cur = self.conn.cursor()
def copy_features_tree(self, schema='memory_to_file'):
    """Copy the features-tree database between memory and file.

    :param schema: 'memory_to_file' to persist the in-memory DB, or
        'file_to_memory' to reload it from disk.
    :return: False when not linked to detdup.
    :raises Exception: when *schema* is neither recognized value.
    """
    # TODO reduce file disk size
    if not self.link_to_detdup:
        return False
    # 1. copy database
    backup_conn = sqlite3.connect(self.sqlite3db_path())
    current_conn = self.features_tree._meta.database.get_conn()
    # Fix: the original left _from/_to unassigned on an unknown schema
    # (NameError) and tested `not (_from or _to)`, which could never
    # detect the mismatch.
    _from = _to = None
    if schema == 'memory_to_file':
        _from, _to = current_conn, backup_conn
    elif schema == 'file_to_memory':
        _from, _to = backup_conn, current_conn
    if _from is None or _to is None:
        raise Exception("schema don't match!")
    sqlitebck.copy(_from, _to)
    backup_conn.close()
    # parenthesized print works identically on Python 2 for a single arg
    print("loaded %s by %s" % (self.sqlite3db_path(), schema))
def backup(filename):
    """Copy the live database into <backupDir>/<filename>."""
    source = database.connect()
    here = os.path.dirname(os.path.realpath(__file__))
    config = configparser.ConfigParser()
    config.read(os.path.join(here, 'totale_architektur.config'))
    backup_dir = os.path.join(here, config.get('database', 'backupDir'))
    if not os.path.exists(backup_dir):
        os.makedirs(backup_dir)
    destination = sqlite3.connect(os.path.join(backup_dir, filename))
    sqlitebck.copy(source, destination)
    source.close()
    destination.close()
def backup(dbFile, dbBackupDir):
    """Copy the module-level DB connection into *dbBackupDir*.

    :param dbFile: source database path; only its basename is reused.
    :param dbBackupDir: directory that receives the copy.
    """
    log.info('{0} Backing up DB file [{1}] to [{2}]'.format(current_thread().name, dbFile, dbBackupDir))
    # os.path.join resolves old TODO item 1 (missing trailing '/').
    dbCopyConnection = sqlite3.connect(
        os.path.join(dbBackupDir, os.path.split(dbFile)[1]))
    try:
        # NOTE(review): dbConnection is presumably the module-level source
        # connection (TODO item 2 about error handling still applies
        # to copy failures) — confirm against the module.
        sqlitebck.copy(dbConnection, dbCopyConnection)
    finally:
        # Fix: the destination connection was never closed.
        dbCopyConnection.close()
    log.info('{0} ... OK'.format(current_thread().name))
def test_copy_multiple_pages(self):
    """Copy a database spanning several pages with pages=1 and a sleep."""
    src = sqlite3.connect(':memory:')
    cursor = src.cursor()
    cursor.execute('CREATE TABLE foo(bar TEXT)')
    rows = ('a' * (2 * SQLITE3_PAGE_SIZE), 'b' * SQLITE3_PAGE_SIZE)
    cursor.execute('INSERT INTO foo VALUES (?), (?)', rows)
    src.commit()
    cursor.close()
    dst = sqlite3.connect(':memory:')
    sqlitebck.copy(src, dst, pages=1, sleep=10)
def setup_databases(self, **kwargs):
    """Seed the default test DB from the 'quickstart' sqlite database."""
    if sqlitebck is None or 'quickstart' not in connections:
        return super(QuickstartMixin, self).setup_databases(**kwargs)
    quickstart = connections['quickstart']
    connection = connections['default']
    quickstart.ensure_connection()
    connection.ensure_connection()
    sqlitebck.copy(quickstart.connection, connection.connection)
    test_options = connection.settings_dict.get("TEST", {})
    if test_options.get("SERIALIZE", True):
        contents = connection.creation.serialize_db_to_string()
        connection._test_serialized_contents = contents
    return 'quickstart'
def setup_databases(self, **kwargs):
    """Prime the default connection by copying the quickstart database."""
    usable = sqlitebck is not None and 'quickstart' in connections
    if usable:
        src = connections['quickstart']
        dst = connections['default']
        src.ensure_connection()
        dst.ensure_connection()
        sqlitebck.copy(src.connection, dst.connection)
        serialize = dst.settings_dict.get("TEST", {}).get("SERIALIZE", True)
        if serialize:
            dst._test_serialized_contents = dst.creation.serialize_db_to_string()
        return 'quickstart'
    return super(QuickstartMixin, self).setup_databases(**kwargs)
def _sqlite_copy(file_, restore=False):
    """Copy the current database to *file_* (or back when restore=True)."""
    import sqlite3 as sqlite
    with Transaction().start(DB_NAME, 0) as transaction, \
            sqlite.connect(file_) as file_conn:
        src, dst = transaction.connection, file_conn
        if restore:
            src, dst = dst, src
        if hasattr(src, 'backup'):
            # native backup API (Python >= 3.7)
            src.backup(dst)
        else:
            try:
                import sqlitebck
            except ImportError:
                return False
            sqlitebck.copy(src, dst)
    return True
def _sqlite_copy(file_, restore=False):
    """Copy the sqlite database to *file_*, or restore from it."""
    try:
        import sqlitebck
    except ImportError:
        return False
    import sqlite3 as sqlite
    with Transaction().start(DB_NAME, 0, _nocache=True) as transaction, \
            sqlite.connect(file_) as file_conn:
        src = transaction.connection
        dst = file_conn
        # sqlitebck does not work with pysqlite2
        if not isinstance(src, sqlite.Connection):
            return False
        if restore:
            src, dst = dst, src
        sqlitebck.copy(src, dst)
    return True
def copy_features_tree(self, schema='memory_to_file'):
    """Copy the features-tree database between memory and file.

    :param schema: 'memory_to_file' persists the in-memory DB,
        'file_to_memory' reloads it from disk.
    :return: False when not linked to detdup.
    :raises Exception: when *schema* matches neither value.
    """
    # TODO reduce file disk size
    if not self.link_to_detdup:
        return False
    # 1. copy database
    backup_conn = sqlite3.connect(self.sqlite3db_path())
    current_conn = self.features_tree._meta.database.get_conn()
    # Fix: with an unknown schema the original raised NameError before
    # reaching its check, and `not (_from or _to)` could never be true
    # for a real connection pair anyway.
    _from = _to = None
    if schema == 'memory_to_file':
        _from, _to = current_conn, backup_conn
    elif schema == 'file_to_memory':
        _from, _to = backup_conn, current_conn
    if _from is None or _to is None:
        raise Exception("schema don't match!")
    sqlitebck.copy(_from, _to)
    backup_conn.close()
    # parenthesized print works identically on Python 2 for a single arg
    print("loaded %s by %s" % (self.sqlite3db_path(), schema))
def test_copy_from_memory_database(self):
    """Copy an in-memory database to a file and compare contents."""
    mem_db = sqlite3.connect(':memory:')
    self.open_connections.append(mem_db)
    cursor = mem_db.cursor()
    cursor.execute('CREATE TABLE foo(bar VARCHAR(20))')
    cursor.executemany('INSERT INTO foo VALUES (?)',
                       [("foo",), ("bar",), ("baz",)])
    mem_db.commit()
    cursor.execute('SELECT * FROM foo ORDER BY bar DESC')
    expected = cursor.fetchall()
    cursor.close()
    # save database to file
    file_db = sqlite3.connect(self.db_filename)
    sqlitebck.copy(source=mem_db, dest=file_db)
    # fetch the same rows back from the file database
    self.open_connections.append(file_db)
    file_cursor = file_db.cursor()
    file_cursor.execute('SELECT * FROM foo ORDER BY bar DESC')
    copied = file_cursor.fetchall()
    file_cursor.close()
    self.assertEqual(expected, copied)
def dump_to_file(self, location=None):
    """
    Saves to file the current DB to a given location. Useful for in-memory DBs
    NOTE: it doesn't update self.connection with the new location, so the old db will still be used for
    further query (intended behaviour since, if it's :memory:, is presumably faster)

    :param location: complete URL (path+filename) to save the db to
    :return:
    """
    import sqlitebck
    if location is None:
        filename = os.path.join(os.path.expanduser('~'), 'pvtracedb.sql')
    else:
        filename = location
    file_connection = sql.connect(filename)
    try:
        # disable journaling for faster operations
        cursor = file_connection.cursor()
        cursor.execute("PRAGMA synchronous = OFF")
        cursor.execute("PRAGMA journal_mode = OFF")
        cursor.close()  # fix: cursor was left open
        sqlitebck.copy(self.connection, file_connection)
    finally:
        # Fix: the file connection leaked; closing also flushes the copy
        # to disk before success is logged.
        file_connection.close()
    self.logger.info("DB copy saved as "+str(filename))
def dump_to_file(self, location=None):
    """
    Saves to file the current DB to a given location. Useful for in-memory DBs
    NOTE: it doesn't update self.connection with the new location, so the old db will still be used for
    further query (intended behaviour since, if it's :memory:, is presumably faster)

    :param location: complete URL (path+filename) to save the db to
    :return:
    """
    import sqlitebck
    if location is None:
        filename = os.path.join(os.path.expanduser('~'), 'pvtracedb.sql')
    else:
        filename = location
    file_connection = sql.connect(filename)
    try:
        # disable journaling for faster operations
        cursor = file_connection.cursor()
        cursor.execute("PRAGMA synchronous = OFF")
        cursor.execute("PRAGMA journal_mode = OFF")
        cursor.close()  # fix: cursor was left open
        sqlitebck.copy(self.connection, file_connection)
    finally:
        # Fix: the file connection leaked; closing also flushes the copy
        # to disk before success is logged.
        file_connection.close()
    self.logger.info("DB copy saved as " + str(filename))
def commit_from_RAM(self):
    """Write the in-memory database back to the saved on-disk connection."""
    if not hasattr(self, 'old_conn'):
        return  # never moved to RAM: nothing to write back
    self.connection.commit()
    sqlitebck.copy(self.connection, self.old_conn)
def test_inerited_destination(self):
    """A Connection subclass instance works as the copy destination."""
    self.create_test_db(self.native)
    sqlitebck.copy(self.native, self.inherited)
    cursor = self.inherited.cursor()
    cursor.execute('SELECT col FROM t')
    row = cursor.fetchone()
    self.assertEqual(row, ('test', ))
def load_to_memory(conn):
    """Return an in-memory copy of the database behind *conn*."""
    in_memory = sqlite3.connect(":memory:", timeout=60,
                                check_same_thread=False)
    sqlitebck.copy(conn, in_memory)
    return in_memory
def update_db(real_data, connector, c, first_time, BACKUP):
    """Append the latest PH/OD/TEMP readings to the in-memory DB and,
    when BACKUP is truthy, snapshot it to the SD card and export CSVs.

    real_data  -- measurements: [1]=pH, [2]=OD, [3]=temperature
    connector  -- sqlite3 connection (the RAM database)
    c          -- cursor on *connector*
    first_time -- timestamp string used in the backup file name
    BACKUP     -- when truthy, copy the DB to disk and dump CSV files
    Returns True.
    """
    # Create the PH, OD and TEMP tables; each item is one column.
    c.execute(
        'CREATE TABLE IF NOT EXISTS PH(ID INTEGER PRIMARY KEY autoincrement, FECHA_HORA TIMESTAMP NOT NULL, MAGNITUD REAL)'
    )
    c.execute(
        'CREATE TABLE IF NOT EXISTS OD(ID INTEGER PRIMARY KEY autoincrement, FECHA_HORA TIMESTAMP NOT NULL, MAGNITUD REAL)'
    )
    c.execute(
        'CREATE TABLE IF NOT EXISTS TEMP(ID INTEGER PRIMARY KEY autoincrement, FECHA_HORA TIMESTAMP NOT NULL, MAGNITUD REAL)'
    )
    # Persist the new tables if they did not exist yet.
    connector.commit()
    # Insert the measured values:
    # pH = real_data[1]; OD = real_data[2]; Temp = real_data[3]
    try:
        c.execute("INSERT INTO PH VALUES (NULL,?,?)",
                  (datetime.datetime.now(), real_data[1]))
        c.execute("INSERT INTO OD VALUES (NULL,?,?)",
                  (datetime.datetime.now(), real_data[2]))
        c.execute("INSERT INTO TEMP VALUES (NULL,?,?)",
                  (datetime.datetime.now(), real_data[3]))
    except:  # NOTE(review): bare except hides all insert errors
        logging.info("no se pudo insertar dato en db")
    # Commit the inserted rows.
    connector.commit()
    # Backup DB in RAM to DISK SD
    if BACKUP:
        filedb = '/home/pi/vprocess2/database/backup__' + first_time + '__.db'
        bck = sqlite3.connect(filedb)
        sqlitebck.copy(connector, bck)
        try:
            # filedb[31:-3] strips the directory prefix and the '.db' suffix.
            os.system(
                'sqlite3 -header -csv %s "select * from ph;" > /home/pi/vprocess2/csv/%s'
                % (filedb, filedb[31:-3]) + 'full_ph.csv')
            os.system(
                'sqlite3 -header -csv %s "select * from od;" > /home/pi/vprocess2/csv/%s'
                % (filedb, filedb[31:-3]) + 'full_od.csv')
            os.system(
                'sqlite3 -header -csv %s "select * from temp;" > /home/pi/vprocess2/csv/%s'
                % (filedb, filedb[31:-3]) + 'full_temp.csv')
            logging.info("\n Backup FULL REALIZADO \n")
        except:
            logging.info("\n Backup FULL NO REALIZADO, NO REALIZADO \n")
        try:
            # Save the DB file name so app.py can pick it up.
            f = open(DIR + "name_db.txt", "w")
            f.write(filedb + '\n')
            f.close()
        except:
            # could not save the DB name for app.py to read
            logging.info(
                "no se pudo guardar el nombre de la DB para ser revisada en app.py"
            )
    return True
def commit_from_RAM(self):
    """Persist the in-memory DB to the original on-disk connection."""
    moved_to_ram = hasattr(self, 'old_conn')
    if moved_to_ram:
        self.connection.commit()
        sqlitebck.copy(self.connection, self.old_conn)
def dump_dp(self, path):
    """Write a copy of self.db to the sqlite file at *path*."""
    target = sqlite3.connect(path)
    sqlitebck.copy(self.db, target)
    target.commit()
    target.close()
def save_db():
    """Persist the in-memory database to the on-disk sqlite file."""
    logging.debug("Saving memory db to disk... thread #{}".format(threading.get_ident()))
    on_disk = sqlite3.connect(TVTROPES_DB_PATH, timeout=60,
                              check_same_thread=False)
    source = get_db_connection()
    sqlitebck.copy(source, on_disk)
    on_disk.close()
    logging.debug("DB Saved!")
def dump_db(self, dbfile=os.path.join(DATA_DIR, 'asx.db')):
    """Persist the in-memory database to *dbfile*."""
    print("Dumping db to %s" % dbfile)
    target = sqlite3.connect(dbfile)
    sqlitebck.copy(self._conn, target)
    target.close()
    print("Dumped db to %s" % dbfile)
def main(): topicfilter = "w" #####Listen measures - estructura para zmq listen de ficha_producto ################ port_sub1 = "5554" context_sub1 = zmq.Context() socket_sub1 = context_sub1.socket(zmq.SUB) socket_sub1.connect("tcp://localhost:%s" % port_sub1) socket_sub1.setsockopt(zmq.SUBSCRIBE, topicfilter) #####Listen measures - estructura para zmq listen real_data (measures from myserial.py) port_sub2 = "5557" context_sub2 = zmq.Context() socket_sub2 = context_sub2.socket(zmq.SUB) socket_sub2.connect("tcp://localhost:%s" % port_sub2) socket_sub2.setsockopt(zmq.SUBSCRIBE, topicfilter) #################################################################################### connector = sqlite3.connect(':memory:', detect_types=sqlite3.PARSE_DECLTYPES | sqlite3.PARSE_COLNAMES) c = connector.cursor() time.sleep(1) first_time = time.strftime("Fecha__%d-%m-%y__Hora__%H_%M_%S") BACKUP = True T_SEG = datetime.datetime.now() T_elapsed = datetime.datetime.now() while True: ############## ZMQ connection for download data ficha_producto ############### try: ficha_producto = socket_sub1.recv(flags=zmq.NOBLOCK).split()[1] ficha_producto = ficha_producto.split(",") ficha_producto_save = ficha_producto #ficha_producto_save = ficha_producto #log para depuracion f = open(DIR + "/ficha_producto_basedatos.txt", "a+") f.write(str(ficha_producto) + "__up__" + "\n") f.close() except zmq.Again: pass ############## ZMQ connection for download real_data (measures from myserial.py) try: temporal = socket_sub2.recv(flags=zmq.NOBLOCK).split() if temporal != "": real_data = temporal #f = open(DIR + "/real_data_basedatos.txt","a+") #f.write(str(real_data) + "__up__" + "\n" ) #f.close() except zmq.Again: pass ########################## ZMQ connections ####################################### # CREAR TABLAS SI NO EXISTEN!!!! 
c.execute( 'CREATE TABLE IF NOT EXISTS PROCESO (ID INTEGER PRIMARY KEY autoincrement, FECHA TIMESTAMP NOT NULL, HORA TIMESTAMP NOT NULL, T_MOSTO REAL, T_SOMBRERO REAL, T_Promedio REAL, T_Setpoint REAL, Flujo_REMONTAJE REAL, Densidad REAL, Yan REAL, pH REAL, Brix REAL, Acidez REAL, Lote REAL, Dosis REAL, Bomba1 REAL, Bomba2 REAL, Electrovalvula_Aire REAL, CEPA REAL, FLUJO_AIRE REAL, co2 REAL)' ) #CREACION DE TABLAS TEMP1(Sombrero), TEMP2(Mosto), TEMP_ (promedio). CADA ITEM ES UNA COLUMNA c.execute( 'CREATE TABLE IF NOT EXISTS T_SOMBRERO (ID INTEGER PRIMARY KEY autoincrement, FECHA_HORA TIMESTAMP NOT NULL, MAGNITUD REAL)' ) c.execute( 'CREATE TABLE IF NOT EXISTS T_MOSTO (ID INTEGER PRIMARY KEY autoincrement, FECHA_HORA TIMESTAMP NOT NULL, MAGNITUD REAL)' ) c.execute( 'CREATE TABLE IF NOT EXISTS T_PROMEDIO (ID INTEGER PRIMARY KEY autoincrement, FECHA_HORA TIMESTAMP NOT NULL, MAGNITUD REAL)' ) logging.info("Se crearon las tablas!!!") #se guardan las tablas agregados en la db si no existian connector.commit() try: real_data_1 = ( float(real_data[3]) + float(real_data[2]) ) / 2 #temperatura promedio entre T_MOSTO y T_SOMBRERO real_data_2 = float(real_data[4]) #medicion de co2 [ppm] #comp1 = float(ficha_producto[12])#round(float(ficha_producto[14])*ficha_producto[12],2) #Densidad_Opt #TASA_Crec #Etanol #sustrato_i #mezclador #bomba1 #bomba2 #Temp_setpoint #Temp_measure #pH_setpoint #pH_measure logging.info("DATOS FLOAT SE insertan datos en db") print "Datos Float obtenidos !!!!" except: print "Datos Float NO fueron obtenidos !!!!" 
logging.info("no se pudo insertar datos en db ??????????????") #INSERCION DE LOS DATOS MEDIDOS, TABLA FULL CON TODA LA DATA, NULL es para el ID try: c.execute( "INSERT INTO PROCESO VALUES (NULL, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)", (datetime.datetime.now().strftime("%Y-%m-%d"), datetime.datetime.now().strftime("%H:%M:%S"), round(float(real_data[3]), 2), round(float( real_data[2]), 2), round(real_data_1, 2), ficha_producto[9], ficha_producto[12], ficha_producto[0], ficha_producto[1], ficha_producto[2], ficha_producto[3], ficha_producto[4], ficha_producto[7], ficha_producto[8], ficha_producto[10], ficha_producto[11], ficha_producto[13], ficha_producto[6], ficha_producto[5], real_data_2)) #Insercion solo de los datos de sensores print "TABLA GRANDE OK!!!! en db" except: print "no se pudo insertar datos TABLA GRANDE en db" try: #Insercion solo de los datos de sensores c.execute("INSERT INTO T_MOSTO VALUES (NULL,?,?)", (datetime.datetime.now().strftime("%Y-%m-%d,%H:%M:%S"), round(float(real_data[3]), 2))) c.execute("INSERT INTO T_SOMBRERO VALUES (NULL,?,?)", (datetime.datetime.now().strftime("%Y-%m-%d,%H:%M:%S"), round(float(real_data[2]), 2))) c.execute("INSERT INTO T_PROMEDIO VALUES (NULL,?,?)", (datetime.datetime.now().strftime("%Y-%m-%d,%H:%M:%S"), round(float(real_data[1]), 2))) print "Datos Insertados !!!" logging.info("se insertaron todos los datos en db") #time.sleep(0.5) except: #print "no se pudo insertar dato en db" print "Fallo al Insertar Dato !!!" 
logging.info("no se pudo insertar datos en db") #se guardan los datos agregados en la db connector.commit() ''' if T_SEG - T_elapsed.second <= 0 : BACKUP = True T_elapsed = 0 else: BACKUP = False T_elapsed = datetime.datetime.now() print BACKUP BACKUP = True if BACKUP : first_time = time.strftime("Fecha__%d-%m-%y__Hora__%H_%M_%S") else: first_time = "__NULO__" ''' #Backup DB in RAM to DISK SD if BACKUP: filedb = '/home/pi/vprocess4c/database/backup__' + first_time + '__.db' bck = sqlite3.connect(filedb) sqlitebck.copy(connector, bck) try: os.system( 'sqlite3 -header -csv %s "select * from T_SOMBRERO;" > /home/pi/vprocess4c/csv/%s' % (filedb, filedb[28:-3]) + 'T_SOMBRERO.csv') os.system( 'sqlite3 -header -csv %s "select * from T_MOSTO;" > /home/pi/vprocess4c/csv/%s' % (filedb, filedb[28:-3]) + 'T_MOSTO.csv') os.system( 'sqlite3 -header -csv %s "select * from T_PROMEDIO;" > /home/pi/vprocess4c/csv/%s' % (filedb, filedb[28:-3]) + 'T_PROMEDIO.csv') os.system( 'sqlite3 -header -csv %s "select * from PROCESO;" > /home/pi/vprocess4c/csv/%s' % (filedb, filedb[28:-3]) + 'PROCESO.csv') logging.info("\n Backup CSV REALIZADO \n") except: logging.info("\n Backup FULL NO REALIZADO, NO REALIZADO \n") try: #Se guarda el nombre de la db para ser utilizado en app.py f = open(DIR + "/name_db.txt", "w") f.write(filedb + '\n') f.close() except: #print "no se pudo guardar el nombre de la DB para ser revisada en app.py" logging.info( "no se pudo guardar el nombre de la DB para ser revisada en app.py" )
def update_db(real_data, ficha_producto, connector, c, first_time, BACKUP):
    """Store the latest process data and sensor readings and, when BACKUP
    is truthy, snapshot the in-memory DB to disk and export a CSV.

    real_data      -- sensor readings: [1]=temperature, [2]=OD, [3]=pH
                      (mapping per the inserts below)
    ficha_producto -- product sheet values (setpoints, pump states, ...)
    connector, c   -- sqlite3 connection (the RAM database) and its cursor
    first_time     -- timestamp string used in the backup file name
    BACKUP         -- when truthy, copy the DB to disk and dump a CSV
    Returns True.
    """
    # Full table with all the data.
    c.execute(
        'CREATE TABLE IF NOT EXISTS PROCESO (ID INTEGER PRIMARY KEY autoincrement, FECHA TIMESTAMP NOT NULL, HORA TIMESTAMP NOT NULL, Densidad_Opt REAL, TASA_Crec REAL, Etanol REAL, SUSTRATO_i REAL, MEZCLADOR REAL, BOMBA1 REAL, BOMBA2 REAL, T_SETPOINT REAL, TEMPERATURA REAL, pH_SETPOINT REAL, pH REAL )'
    )
    # Per-sensor tables; each item is one column.
    c.execute(
        'CREATE TABLE IF NOT EXISTS TEMPERATURA (ID INTEGER PRIMARY KEY autoincrement, FECHA_HORA TIMESTAMP NOT NULL, MAGNITUD REAL)'
    )
    c.execute(
        'CREATE TABLE IF NOT EXISTS PH (ID INTEGER PRIMARY KEY autoincrement, FECHA_HORA TIMESTAMP NOT NULL, MAGNITUD REAL)'
    )
    c.execute(
        'CREATE TABLE IF NOT EXISTS OD (ID INTEGER PRIMARY KEY autoincrement, FECHA_HORA TIMESTAMP NOT NULL, MAGNITUD REAL)'
    )
    logging.info("Se crearon las tablas!!!")
    # Persist the new tables if they did not exist yet.
    connector.commit()
    # PROCESO row layout (per original notes): Densidad_Opt, TASA_Crec,
    # Etanol, sustrato_i, mezclador, bomba1, bomba2, Temp_setpoint,
    # Temp_measure, pH_setpoint, pH_measure. NULL is the autoincrement ID.
    try:
        c.execute(
            "INSERT INTO PROCESO VALUES (NULL, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)",
            (datetime.datetime.now().strftime("%Y-%m-%d"),
             datetime.datetime.now().strftime("%H:%M:%S"), ficha_producto[0],
             ficha_producto[1], ficha_producto[2], ficha_producto[3],
             ficha_producto[4], ficha_producto[10], ficha_producto[11],
             ficha_producto[9], real_data[1], ficha_producto[8],
             real_data[3]))
        # Per-sensor rows only.
        c.execute("INSERT INTO PH VALUES (NULL,?,?)",
                  (datetime.datetime.now().strftime("%Y-%m-%d,%H:%M:%S"),
                   round(float(real_data[3]), 2)))
        c.execute("INSERT INTO OD VALUES (NULL,?,?)",
                  (datetime.datetime.now().strftime("%Y-%m-%d,%H:%M:%S"),
                   round(float(real_data[2]), 2)))
        c.execute("INSERT INTO TEMPERATURA VALUES (NULL,?,?)",
                  (datetime.datetime.now().strftime("%Y-%m-%d,%H:%M:%S"),
                   round(float(real_data[1]), 2)))
        logging.info("se insertaron todos los datos en db")
    except:  # NOTE(review): bare except hides all insert errors
        logging.info("no se pudo insertar datos en db")
    # Commit the inserted rows.
    connector.commit()
    # Backup DB in RAM to DISK SD
    if BACKUP:
        filedb = DIR + '/database/backup__' + first_time + '__.db'
        bck = sqlite3.connect(filedb)
        sqlitebck.copy(connector, bck)
        try:
            # filedb[28:-3] strips the directory prefix and '.db' suffix.
            os.system(
                'sqlite3 -header -csv %s "select * from PROCESO;" > /home/pi/vprocess6/csv/%s'
                % (filedb, filedb[28:-3]) + 'PROCESO.csv')
            logging.info("\n Backup CSV REALIZADO \n")
        except:
            logging.info("\n Backup FULL NO REALIZADO, NO REALIZADO \n")
        try:
            # Save the DB file name so app.py can pick it up.
            f = open(DIR + "/name_db.txt", "w")
            f.write(filedb + '\n')
            f.close()
        except:
            # could not save the DB name for app.py to read
            logging.info(
                "no se pudo guardar el nombre de la DB para ser revisada en app.py"
            )
    return True
def update_db(real_data, ficha_producto, connector, c, first_time, BACKUP):
    """Store process/sensor readings, holding each sensor's last non-zero
    value, and optionally back the in-memory DB up to disk with a CSV export.

    real_data      -- sensor readings; [2]=T_SOMBRERO, [3]=T_MOSTO,
                      [4]=CO2 (mapping per the code below)
    ficha_producto -- product sheet values (setpoints, pump states, ...)
    connector, c   -- sqlite3 connection (the RAM database) and its cursor
    first_time     -- timestamp string used in the backup file name
    BACKUP         -- when truthy, copy the DB to disk and export a CSV
    Returns True.
    """
    # Create the tables if they do not exist.
    c.execute('CREATE TABLE IF NOT EXISTS PROCESO (ID INTEGER PRIMARY KEY autoincrement, FECHA TIMESTAMP NOT NULL, HORA TIMESTAMP NOT NULL, T_MOSTO REAL, T_SOMBRERO REAL, T_Promedio REAL, T_Setpoint REAL, Flujo_REMONTAJE REAL, Densidad REAL, Yan REAL, pH REAL, Brix REAL, Acidez REAL, Concentracion_FDA REAL, Dosis_FDA REAL, Bomba1 REAL, Bomba2 REAL, Electrovalvula_Aire REAL, FLUJO_FDA REAL, FLUJO_AIRE REAL, CO2 REAL)')
    # Temperature tables: T_SOMBRERO, T_MOSTO, T_PROMEDIO (average).
    c.execute('CREATE TABLE IF NOT EXISTS T_SOMBRERO (ID INTEGER PRIMARY KEY autoincrement, FECHA_HORA TIMESTAMP NOT NULL, MAGNITUD REAL)')
    c.execute('CREATE TABLE IF NOT EXISTS T_MOSTO (ID INTEGER PRIMARY KEY autoincrement, FECHA_HORA TIMESTAMP NOT NULL, MAGNITUD REAL)')
    c.execute('CREATE TABLE IF NOT EXISTS T_PROMEDIO (ID INTEGER PRIMARY KEY autoincrement, FECHA_HORA TIMESTAMP NOT NULL, MAGNITUD REAL)')
    logging.info("Se crearon las tablas!!!")
    # Persist the new tables if they did not exist yet.
    connector.commit()
    try:
        # Anti-zero filters: a zero reading is replaced by the last
        # non-zero value seen for that sensor.
        # NOTE(review): real_data_X_save is referenced before assignment
        # if the very first reading is zero — UnboundLocalError, swallowed
        # by the bare except below; confirm intended.
        # t_sombrero sensor
        if float(real_data[2]) != 0:
            real_data_2_save = float(real_data[2])
        else:
            real_data[2] = real_data_2_save
        # t_mosto sensor
        if float(real_data[3]) != 0:
            real_data_3_save = float(real_data[3])
        else:
            real_data[3] = real_data_3_save
        # co2 sensor
        if float(real_data[4]) != 0:
            real_data_4_save = float(real_data[4])
        else:
            real_data[4] = real_data_4_save
        # average of T_MOSTO and T_SOMBRERO temperatures
        real_data_1 = (float(real_data[3]) + float(real_data[2]))/2
        real_data_2 = float(real_data[4])  # CO2 measurement [ppm]
        #comp1 = float(ficha_producto[12])#round(float(ficha_producto[14])*ficha_producto[12],2)
        # ficha_producto layout (per original notes): Densidad_Opt,
        # TASA_Crec, Etanol, sustrato_i, mezclador, bomba1, bomba2,
        # Temp_setpoint, Temp_measure, pH_setpoint, pH_measure
    except:
        pass
    # Insert the full PROCESO row (NULL is the autoincrement ID).
    try:
        c.execute("INSERT INTO PROCESO VALUES (NULL, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)",
                  (datetime.datetime.now().strftime("%Y-%m-%d"),
                   datetime.datetime.now().strftime("%H:%M:%S"),
                   round(float(real_data[3]),2),
                   round(float(real_data[2]),2),
                   round(real_data_1,2),
                   ficha_producto[9],
                   ficha_producto[12],
                   ficha_producto[0],
                   ficha_producto[1],
                   ficha_producto[2],
                   ficha_producto[3],
                   ficha_producto[4],
                   ficha_producto[7],
                   ficha_producto[8],
                   ficha_producto[10],
                   ficha_producto[11],
                   ficha_producto[13],
                   ficha_producto[6],
                   ficha_producto[5],
                   real_data_2
                   ))
    except:
        pass
    try:
        # Per-sensor rows only.
        c.execute("INSERT INTO T_MOSTO VALUES (NULL,?,?)",
                  (datetime.datetime.now().strftime("%Y-%m-%d,%H:%M:%S"),
                   round(float(real_data[3]),2)))
        c.execute("INSERT INTO T_SOMBRERO VALUES (NULL,?,?)",
                  (datetime.datetime.now().strftime("%Y-%m-%d,%H:%M:%S"),
                   round(float(real_data[2]),2)))
        c.execute("INSERT INTO T_PROMEDIO VALUES (NULL,?,?)",
                  (datetime.datetime.now().strftime("%Y-%m-%d,%H:%M:%S"),
                   round(float(real_data[1]),2)))
        #time.sleep(0.5)
    except:
        pass
    # Commit the inserted rows.
    connector.commit()
    # Backup DB in RAM to DISK SD
    if BACKUP:
        filedb = DIR + '/database/backup__' + first_time + '__.db'
        bck = sqlite3.connect(filedb)
        sqlitebck.copy(connector, bck)
        try:
            # filedb[28:-3] strips the directory prefix and '.db' suffix.
            #0s.system('sqlite3 -header -csv %s "select * from T_SOMBRERO;" > /home/pi/vprocess4c/csv/%s' % (filedb,filedb[28:-3])+'T_SOMBRERO.csv' )
            #os.system('sqlite3 -header -csv %s "select * from T_MOSTO;" > /home/pi/vprocess4c/csv/%s' % (filedb,filedb[28:-3])+'T_MOSTO.csv' )
            #os.system('sqlite3 -header -csv %s "select * from T_PROMEDIO;" > /home/pi/vprocess4c/csv/%s' % (filedb,filedb[28:-3])+'T_PROMEDIO.csv' )
            os.system('sqlite3 -header -csv %s "select * from PROCESO;" > /home/pi/vprocess4c/csv/%s' % (filedb,filedb[28:-3])+'PROCESO.csv' )
        except:
            pass
        try:
            # Save the DB file name so app.py can pick it up.
            f = open(DIR + "/name_db.txt","w")
            f.write(filedb + '\n')
            f.close()
        except:
            pass
    return True
def update_db(real_data, ficha_producto, connector, c, first_time, BACKUP):
    """Store temperature readings and the full process row and, when
    BACKUP is truthy, snapshot the in-memory DB to the SD card and export CSVs.

    real_data      -- sensor readings (see NOTE below on index mapping)
    ficha_producto -- product sheet values (fundo, cepa, setpoints, ...)
    connector, c   -- sqlite3 connection (the RAM database) and its cursor
    first_time     -- timestamp string used in the backup file name
    BACKUP         -- when truthy, copy the DB to disk and dump CSV files
    Returns True.
    """
    # Temperature tables: T_SOMBRERO, T_MOSTO, T_PROMEDIO (average);
    # each item is one column.
    c.execute(
        'CREATE TABLE IF NOT EXISTS T_SOMBRERO (ID INTEGER PRIMARY KEY autoincrement, FECHA_HORA TIMESTAMP NOT NULL, MAGNITUD REAL)'
    )
    c.execute(
        'CREATE TABLE IF NOT EXISTS T_MOSTO (ID INTEGER PRIMARY KEY autoincrement, FECHA_HORA TIMESTAMP NOT NULL, MAGNITUD REAL)'
    )
    c.execute(
        'CREATE TABLE IF NOT EXISTS T_PROMEDIO (ID INTEGER PRIMARY KEY autoincrement, FECHA_HORA TIMESTAMP NOT NULL, MAGNITUD REAL)'
    )
    # Full table with all the data.
    c.execute(
        'CREATE TABLE IF NOT EXISTS PROCESO (ID INTEGER PRIMARY KEY autoincrement, FECHA TIMESTAMP NOT NULL, HORA TIMESTAMP NOT NULL, FUNDO TEXT NOT NULL, CEPA TEXT NOT NULL, T_MOSTO REAL, T_SOMBRERO REAL, T_Promedio REAL, T_Setpoint REAL, Flujo REAL, Densidad REAL, Yan REAL, pH REAL, Brix REAL, Acidez REAL, Lote REAL, Dosis REAL, Bomba1 REAL, Bomba2 REAL)'
    )
    logging.info("Se crearon las tablas!!!")
    # Persist the new tables if they did not exist yet.
    connector.commit()
    # Insert the measured data. Original note said
    # T.SOMBRERO = real_data[1]; T.MOSTO = real_data[2]; T.PROMEDIO = real_data[3],
    # but the inserts below use T_MOSTO=real_data[3], T_SOMBRERO=real_data[2],
    # T_PROMEDIO=real_data[1] — NOTE(review): confirm which mapping is right.
    try:
        # Per-sensor rows only.
        c.execute("INSERT INTO T_MOSTO VALUES (NULL,?,?)",
                  (datetime.datetime.now().strftime("%Y-%m-%d,%H:%M:%S"),
                   real_data[3]))
        c.execute("INSERT INTO T_SOMBRERO VALUES (NULL,?,?)",
                  (datetime.datetime.now().strftime("%Y-%m-%d,%H:%M:%S"),
                   real_data[2]))
        c.execute("INSERT INTO T_PROMEDIO VALUES (NULL,?,?)",
                  (datetime.datetime.now().strftime("%Y-%m-%d,%H:%M:%S"),
                   real_data[1]))
        # Full PROCESO row; NULL is the autoincrement ID.
        c.execute(
            "INSERT INTO PROCESO VALUES (NULL, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)",
            (datetime.datetime.now().strftime("%Y-%m-%d"),
             datetime.datetime.now().strftime("%H:%M:%S"),
             ficha_producto[5], ficha_producto[6], real_data[3],
             real_data[2], real_data[1], ficha_producto[9],
             (float(real_data[8]) *
              float(ficha_producto[12])), ficha_producto[0],
             ficha_producto[1], ficha_producto[2], ficha_producto[3],
             ficha_producto[4], ficha_producto[7], ficha_producto[8],
             ficha_producto[10], ficha_producto[11]))
        logging.info("se insertaron todos los datos en db")
    except:  # NOTE(review): bare except hides all insert errors
        logging.info("no se pudo insertar datos en db")
    # Commit the inserted rows.
    connector.commit()
    # Backup DB in RAM to DISK SD
    if BACKUP:
        filedb = '/home/pi/vprocess4/database/backup__' + first_time + '__.db'
        bck = sqlite3.connect(filedb)
        sqlitebck.copy(connector, bck)
        try:
            # filedb[28:-3] strips the directory prefix and '.db' suffix.
            os.system(
                'sqlite3 -header -csv %s "select * from T_SOMBRERO;" > /home/pi/vprocess4/csv/%s'
                % (filedb, filedb[28:-3]) + 'T_SOMBRERO.csv')
            os.system(
                'sqlite3 -header -csv %s "select * from T_MOSTO;" > /home/pi/vprocess4/csv/%s'
                % (filedb, filedb[28:-3]) + 'T_MOSTO.csv')
            os.system(
                'sqlite3 -header -csv %s "select * from T_PROMEDIO;" > /home/pi/vprocess4/csv/%s'
                % (filedb, filedb[28:-3]) + 'T_PROMEDIO.csv')
            os.system(
                'sqlite3 -header -csv %s "select * from PROCESO;" > /home/pi/vprocess4/csv/%s'
                % (filedb, filedb[28:-3]) + 'PROCESO.csv')
            logging.info("\n Backup CSV REALIZADO \n")
        except:
            logging.info("\n Backup FULL NO REALIZADO, NO REALIZADO \n")
        try:
            # Save the DB file name so app.py can pick it up.
            f = open(DIR + "name_db.txt", "w")
            f.write(filedb + '\n')
            f.close()
        except:
            # could not save the DB name for app.py to read
            logging.info(
                "no se pudo guardar el nombre de la DB para ser revisada en app.py"
            )
    return True