def fileDelete():
    """Delete the requested audio files from the DB and from disk.

    Reads the "files" list from the POSTed form, removes the matching
    rows from ``audiofiles``, deletes each file's hash directory, and
    returns the names still present as JSON.
    """
    files_to_delete = request.form.getlist("files")
    # Guard: an empty list would render the SQL as "IN ()", which is a
    # syntax error in SQLite — just return the current file list.
    if not files_to_delete:
        deeb_data = query_db("select fname from audiofiles")
        return jsonify(names=[dat["fname"] for dat in deeb_data])

    placeholders = ",".join(['?'] * len(files_to_delete))
    deeb_data = query_db(
        "select fname, fhash from audiofiles where fname in ({})".format(
            placeholders), tuple(files_to_delete))

    # Delete from DB
    deeb = get_db()
    cur = deeb.cursor()
    cur.execute(
        "DELETE FROM audiofiles WHERE fname in ({})".format(placeholders),
        tuple(files_to_delete))
    deeb.commit()
    close_db()

    # Delete the hash folder that holds all files for each row;
    # ignore_errors=True so a missing folder does not abort the request.
    for dat in deeb_data:
        shutil.rmtree(os.path.join(UPLOAD_FOLDER, 'test', dat['fhash']), True)

    # Return the names that remain after deletion.
    deeb_data = query_db("select fname from audiofiles")
    retnames = [dat["fname"] for dat in deeb_data]
    return jsonify(names=retnames)
def delete_schedule(schedule_id):
    """Delete one schedule row plus its events; respond with HTTP 204."""
    conn = get_db()
    params = (schedule_id, )
    conn.execute('DELETE FROM schedules WHERE id = ?', params)
    conn.execute('DELETE FROM events WHERE schedule_id = ?', params)
    conn.commit()
    close_db()
    return ('', 204)
def test_create_table(self):
    """The expected table should appear in sqlite_master after creation."""
    connection = connect_db(db_name, table_name)
    cursor = connection.cursor()
    cursor.execute("SELECT name FROM sqlite_master WHERE type='table'")
    tables = cursor.fetchall()
    close_db(connection)
    self.assertEqual(tables[0][0], table_name)
def deleteProduct(id_producto):
    """Delete a product by id and redirect to the admin page.

    Security fix: the original interpolated ``id_producto`` into the SQL
    with str.format, allowing SQL injection; use a bound parameter.
    """
    close_db()  # drop any stale connection before getting a fresh one
    db = get_db()
    db.execute('DELETE FROM productos WHERE id_producto = ?', (id_producto, ))
    db.commit()
    close_db()
    return redirect(url_for('admin'))
def sql_select_images_byUser(id):
    """Return every Image row belonging to the given user id.

    Note: the original called ``db.commit()`` after a plain SELECT,
    which is a no-op (SELECT changes nothing); removed.
    """
    db = get_db()
    images = db.execute('SELECT * FROM Image WHERE idUser = ?',
                        [id]).fetchall()
    close_db()
    return images
def sql_select_most_downloads():
    """Return the 20 most-downloaded active images with their owners' names."""
    query = (
        'SELECT idImage,Image.name,votes,downloads,path,User.name '
        'FROM Image INNER JOIN User ON Image.idUser = User.idUser '
        'WHERE Image.status=1 ORDER BY downloads DESC LIMIT 20')
    connection = get_db()
    rows = connection.execute(query).fetchall()
    close_db()
    return rows
def sql_select_to_update(id):
    """Fetch the editable fields of one image by id.

    Bug fix: ``close_db()`` was placed after ``return`` and therefore
    never executed, leaking the connection; close before returning.
    """
    db = get_db()
    image = db.execute(
        'SELECT idImage,name,description,path,status FROM Image WHERE idImage= ?',
        [id]).fetchone()
    close_db()
    return image
def sql_download_image(id):
    """Return the stored filesystem path for the image with the given id."""
    connection = get_db()
    row = connection.execute('SELECT path FROM Image WHERE idImage = ?',
                             [id]).fetchone()
    image_path = row[0]
    close_db()
    return image_path
def sql_select_image_by_id(id):
    """Return one image row (joined with its owner's name) by image id.

    Bug fix: ``close_db()`` was placed after ``return`` and therefore
    never executed, leaking the connection; close before returning.
    """
    db = get_db()
    image = db.execute(
        'SELECT idImage,Image.name,description,path,User.name FROM Image INNER JOIN User ON Image.idUser = User.idUser WHERE idImage= ?',
        [id]).fetchone()
    close_db()
    return image
def users():
    """Admin-only user listing with an optional name search on POST.

    Security fix: the search query interpolated user input with
    str.format (SQL injection); it now binds the pattern as a parameter.
    """
    if g.user[4] != "Administrador":
        print(g.user[4])
        return redirect('/error')
    try:
        if request.method == 'GET':
            db = get_db()
            users_list = db.execute('SELECT * FROM usuarios;').fetchall()
            return render_template('usuarios.html', users=users_list)
        else:
            # BUSQUEDA - INICIO (search)
            close_db()
            db = get_db()
            busqueda = request.form['search']
            # form values are strings, never None — test for empty instead
            # (empty string matched everything via LIKE '%%' anyway).
            if not busqueda:
                users_list = db.execute('SELECT * FROM usuarios;').fetchall()
            else:
                users_list = db.execute(
                    "SELECT * FROM usuarios WHERE usuario LIKE ?;",
                    ('%{}%'.format(busqueda), )).fetchall()
            return render_template('usuarios.html', users=users_list)
            # BUSQUEDA - FIN
    except Exception as e:
        print("Ocurrio un eror:", e)
        return render_template('principalAdmin.html')  # DEIZY
def employee():
    """Employee product listing with an optional name search on POST.

    Security fix: the search query interpolated user input with
    str.format (SQL injection); it now binds the pattern as a parameter.
    """
    try:
        if request.method == 'GET':
            db = get_db()
            product_list = db.execute('SELECT * FROM productos;').fetchall()
            return render_template('principalEmpleado.html',
                                   productos=product_list)
        else:
            # BUSQUEDA - INICIO (search)
            close_db()
            db = get_db()
            busqueda = request.form['search']
            # form values are strings, never None — test for empty instead.
            if not busqueda:
                product_list = db.execute(
                    'SELECT * FROM productos;').fetchall()
            else:
                product_list = db.execute(
                    "SELECT * FROM productos WHERE nombre LIKE ?;",
                    ('%{}%'.format(busqueda), )).fetchall()
            return render_template('principalEmpleado.html',
                                   productos=product_list)
            # BUSQUEDA - FIN
    except Exception as e:
        print("Ocurrio un eror:", e)
        return render_template('principalEmpleado.html')
def delete_schedules():
    """Wipe every schedule and event row; respond with HTTP 204."""
    connection = get_db()
    connection.execute('DELETE FROM schedules')
    connection.execute('DELETE FROM events')
    connection.commit()
    close_db()
    return ('', 204)
def receiveText():
    """Print unread client messages (is_read = 0) and mark them as read.

    Fix: the original executed the same SELECT twice (once as the truth
    test, once for iteration); fetch the result set a single time.
    """
    conn = open_db()
    cur = conn.cursor()
    print(
        '------------------------------------------M E S S A G G I--------------------------------------------\n'
    )
    unread = cur.execute(
        'SELECT * FROM Client_messages WHERE is_read = 0').fetchall()
    if unread:
        for msg in unread:
            # Resolve the sender's display name from its id (msg[2]).
            name = cur.execute('SELECT name FROM Users WHERE id = ?',
                               (msg[2], )).fetchone()[0]
            print(
                f"{name}[id: {msg[2]}]: {msg[3]} at {datetime.fromtimestamp(float(msg[4])).strftime('%H:%M')}"
            )
        # Mark everything we just displayed as read so it is not shown again.
        cur.execute('UPDATE Client_messages SET is_read = 1 WHERE is_read = 0')
        conn.commit()
    else:
        print('Non ci sono messaggi nuovi!')
    print(
        '\n\n--------------------------------------------------------------------------------------------------'
    )
    close_db(conn)
def upload_df_with_batches(SQL, df, queue=None, batch_size=500):
    """Upload a DataFrame to the database in batches, or enqueue the batches.

    Args:
        SQL: statement passed through to ``upload_batch`` for each batch.
        df: pandas DataFrame to upload.
        queue: optional queue; when given, (SQL, batch) tuples are put on
            it instead of being uploaded directly.
        batch_size: number of rows per batch.
    """
    # cursor.execute() cannot digest NaN, so replace it with None.
    # According to Stack Overflow this replacement may make the DataFrame
    # misbehave, so we do it immediately before uploading.
    df = df.where(df.notnull(), None)
    batches = list(split_df(df, batch_size))
    length = len(batches)
    logging.info(
        f'ALL ITEMS: {sum([len(batch) for batch in batches])} IN {length} BATCHES'
    )
    logging.info(f'DF SHAPE: {df.shape}')
    warnings_ = []
    if not queue:
        cur, conn = open_db()
        logging.info(f"Uploading items")
        progress_string = 'Uploading dataframe to DB: '
        printProgressBar(0,
                         length,
                         prefix=progress_string,
                         suffix='Complete',
                         length=100)
        for i, batch in enumerate(batches):
            # NOTE(review): warnings_ is overwritten each iteration, so only
            # the last batch's warnings reach show_1265_warnings — confirm
            # whether they should be accumulated instead.
            warnings_ = upload_batch(SQL, batch, cur, conn)
            printProgressBar(i + 1,
                             length,
                             prefix=progress_string,
                             suffix='Complete',
                             length=100)
        conn.commit()
        close_db(cur, conn)
    else:
        # Queue mode: hand the batches to a consumer; nothing is uploaded
        # here, so warnings_ stays the empty list.
        print('Putting {} to Queue...'.format(batches[0][0]['bkf_filename']))
        for batch in batches:
            queue.put((SQL, batch))
    show_1265_warnings(warnings_)
def run(self):
    """Thread main loop: read the Twitch IRC socket forever, answer PING
    keep-alives and store PRIVMSG lines via ``insert_message``."""
    logging.info('Started Twitch Thread on: {}'.format(self.channel))
    init_db()
    logging.info('Started db connection')
    while True:
        try:
            msg = self.irc.recv(1024).decode()
            logging.info(
                'New message received from twitch IRC: {}'.format(msg))
            msg = msg.split()
            if msg[0] == 'PING':
                # IRC keep-alive: reply PONG with the server token.
                self.command('PONG', msg[1])
                logging.info('PONG cmd answered')
            if msg[1] == 'PRIVMSG':
                # msg[0] is ":nick!user@host" — slice out the nick; the
                # chat text starts at the 4th token, prefixed with ':'.
                name = msg[0]
                phr = ' '.join(msg[3:])
                insert_message(
                    (self.channel, name[1:name.find('!')], phr[1:],
                     TwitchThread.create_datetime_now(),
                     TwitchThread.create_datetime_now_second()))
                logging.info('Collecting message from twitch')
        except Exception as e:
            # Short/empty reads raise IndexError above; log, close the db
            # handle, and keep the loop alive.
            logging.error(e)
            close_db()
def delete_product_from_order(product_id, order_id):
    """Remove one product line from an order's details."""
    conn = db.get_db()
    cursor = conn.cursor()
    cursor.execute("DELETE FROM details WHERE order_id= ? AND product_id= ?",
                   [order_id, product_id])
    conn.commit()
    db.close_db()
def after(f):
    """
    Closes database connection

    Fix: the original declared ``global db`` but never referenced the
    name anywhere in the body; the useless declaration is removed.
    """
    database.close_db()
    return f
def leer_productos():
    """Return every row of the products table."""
    conn = db.get_db()
    cursor = conn.cursor()
    cursor.execute("SELECT * FROM products")
    rows = cursor.fetchall()
    db.close_db()
    return rows
def buscar_un_correo(email):
    """Return the single user row matching the given e-mail, if any."""
    conn = db.get_db()
    cursor = conn.cursor()
    cursor.execute("SELECT * FROM users WHERE email = ?", [email])
    row = cursor.fetchone()
    db.close_db()
    return row
def buscar_un_usuario(username):
    """Return the single user row matching the given username, if any."""
    conn = db.get_db()
    cursor = conn.cursor()
    cursor.execute("SELECT * FROM users WHERE username = ?", [username])
    row = cursor.fetchone()
    db.close_db()
    return row
def update_order_total(order_id, order_total, order_date):
    """Persist a new total and date on an existing order."""
    conn = db.get_db()
    cursor = conn.cursor()
    cursor.execute("UPDATE orders SET order_total= ?, order_date= ? WHERE id= ? ",
                   [order_total, order_date, order_id])
    conn.commit()
    db.close_db()
def order_checkout(order_id, order_date):
    """Close an order (order_open = 0) and stamp its checkout date."""
    conn = db.get_db()
    cursor = conn.cursor()
    cursor.execute("UPDATE orders SET order_open= 0, order_date= ? WHERE id= ? ",
                   [order_date, order_id])
    conn.commit()
    db.close_db()
def leer_detalles_orden(order_id):
    """Return all detail rows belonging to one order."""
    conn = db.get_db()
    cursor = conn.cursor()
    cursor.execute("SELECT * FROM details WHERE order_id= ?", [order_id])
    rows = cursor.fetchall()
    db.close_db()
    return rows
def leer_usuarios():
    """Return every row of the users table."""
    conn = db.get_db()
    cursor = conn.cursor()
    cursor.execute("SELECT * FROM users")
    rows = cursor.fetchall()
    db.close_db()
    return rows
def receive():
    """Return undelivered messages for a receiver as JSON and mark them
    delivered (is_received = 1)."""

    def dict_factory(cursor, row):
        # Row factory (found online) exposing column values by field name.
        return {desc[0]: row[i] for i, desc in enumerate(cursor.description)}

    conn = open_db()  # open the db
    conn.row_factory = dict_factory
    cur = conn.cursor()
    # Only messages addressed to the requested receiver are returned.
    receiver_id = request.args.get('receiver_id')
    messages = cur.execute(
        'SELECT * FROM Server_messages WHERE is_received = 0 AND receiver_id = ?',
        (receiver_id, )).fetchall()
    # Flag everything we just read as delivered.
    cur.execute(
        'UPDATE Server_messages SET is_received = 1 WHERE receiver_id = ?',
        (receiver_id, ))
    conn.commit()
    close_db(conn)
    return jsonify(messages)
def stop() -> int:
    """Close the database; return 0 on success, -1 on a db error."""
    try:
        db.close_db()
    except db.db_Error as err:
        print(str(err))
        return -1
    return 0
def run(args, db_name='main.sqlite', table_name='main'):
    """
    Takes arguments (usually from command-line), parses them, and
    retrieves the words, filters, and formats them.
    Returns a formatted string ready to print-to-screen.
    """
    report = ''
    options = dealArgs(args).to_object()  # parse the arguments
    words = getAndFilter(options)         # get and filter the words
    word_pairs = getPairs(words)
    conn = connect_db(db_name, table_name)
    # -c flag: clear out the table before adding anything
    if options['clear']:
        removed_rows = empty_table(conn, table_name)
        report += f'\nRemoved {removed_rows} rows.'
    # Add the word-pairs and capture how many rows changed
    modified_rows = add_words(conn, word_pairs, table_name)
    close_db(conn)
    # Report success or failure based on the modified-row count
    if modified_rows <= 0:
        report += f'\nFailed to save anything'
    else:
        report += f'\nSuccessfully added {modified_rows} words'
    return report
def buscar_productos(product_name):
    """Return products whose name contains the given substring.

    Security fix: the original interpolated ``product_name`` into the
    SQL via an f-string (SQL injection); bind the LIKE pattern instead.
    """
    connection = db.get_db()
    cur = connection.cursor()
    cur.execute("SELECT * FROM products WHERE product_name LIKE ?",
                ('%' + product_name + '%', ))
    rv = cur.fetchall()
    db.close_db()
    return rv
def create_product(product_name, product_price, filename):
    """Insert a new product row, enabled by default."""
    conn = db.get_db()
    cursor = conn.cursor()
    cursor.execute(
        "INSERT INTO products (product_name, product_price, product_filename,enabled) VALUES (?,?,?,'True')",
        [product_name, product_price, filename])
    conn.commit()
    db.close_db()
def register(usuario, clave, email):
    """Insert a new user row (username, email, password), enabled by default."""
    conn = db.get_db()
    cursor = conn.cursor()
    cursor.execute(
        "INSERT INTO users (username, email, password, enabled) VALUES (?,?,?,'True')",
        [usuario, email, clave])
    conn.commit()
    db.close_db()
def run_table_tickers(): time = lib.now() tickers = get_table_tickers() time_ticker = lib.now() print 'Started Running: %s' % time_ticker count = 0 pages_hit = 0 reached = False try: for ticker in tickers: print ticker pages_hit += run_ticker(ticker) count += 1 lib.sleep(count) db.close_db() finally: print 'Finished. Timing for getting tickers: %s, Timing for getting data: %s, Sleep count: %s, Pages Hit: %s' % (time_ticker - time, lib.now() - time_ticker, count/lib.sleep_mod, pages_hit)
def run_nasdaq_nyse(): nasdaq, nyse = get_nasdaq_nyse_data() time = lib.now() print 'Started Running: %s' % time count = 0 print 'Nasdaq Commencing: %s' % lib.now() for ticker in nasdaq: print ticker run_ticker(ticker) count += 1 lib.sleep(count) nasdaq_time = lib.now() print 'Nasdaq Finished, count: %s' % count print 'NYSE Commencing: %s' % lib.now() for ticker in nyse: print ticker run_ticker(ticker) count += 1 lib.sleep(count) nyse_time = lib.now() print 'NYSE Finished, count: %s' % count db.close_db() print 'Finished. Timing for nasdaq: %s, Timing for nyse: %s' % (nasdaq_time - time, nyse_time - nasdaq_time)
# Script: build a Markov model from state-pair data stored via the db
# module and pretty-print the backwards transition probabilities.
import db
import array
import numpy as np
import markov
import pprint

db.create_cur()
states = db.get_states()
data = db.get_pairs_browser()
# Bind the model to its own name instead of rebinding (shadowing) the
# ``markov`` module name, as the original did.
model = markov.Markov(states, data)
final = model.calc_backwards_prob()
pprint.pprint(final)
db.close_db()