def slot_sync(self, arg):
    """Perform a synchronization with a selected database file as the
    administrative user.

    This allows even a finalized database to be updated.  The chosen
    file path is remembered in the "syncFile" setting, and the sync is
    refused when the target file's "dbname" config does not match the
    current master database name.
    """
    # Pre-seed the file dialog with the previously used sync file, if any.
    sfile = self.settings.getSetting("syncFile")
    if sfile:
        sdir = os.path.dirname(sfile)
        sfile = os.path.basename(sfile)
    else:
        sdir = None
    syncfile = getFile(_("User database file"), startDir=sdir, startFile=sfile, defaultSuffix=".zga", filter=(_("Report Files"), (u"*.zga",)))
    if not syncfile:
        # User cancelled the dialog.
        return
    self.settings.setSetting("syncFile", syncfile)
    # Open the target just long enough to read its database name.
    dbs = DBs(syncfile)
    if not dbs.isOpen():
        return
    sdbname = dbs.getConfig(u"dbname")
    dbs.close()
    if (self.dbname != sdbname):
        warning(_("%s: Database name does not match current master name") % syncfile)
        return
    # Progress/output dialog kept on self so the sync can report into it.
    self.dlg = Output()
    synchronize(self.master, syncfile, self.dlg)
    self.dlg.done()
def find(query, components):
    """Run *query* (with parameter tuple *components*) against the commit
    table and hydrate full Commit objects, including each commit's files
    and keywords.

    Returns a list of Commit instances (empty when no rows match).
    """
    conn = DB.getConn()
    cursor = conn.cursor()
    cursor.execute(query, components)
    commitrows = cursor.fetchall()
    commitfiles = []
    # Initialize alongside commitfiles so both are always defined.
    commitkeywords = []
    if commitrows:
        # Safe to interpolate: every id is forced through int() first.
        allcommitids = ",".join([str(int(commit[0])) for commit in commitrows])
        DB.execute(cursor, "SELECT * from " + DB.commitfile._table + " WHERE commitid IN (" + allcommitids + ")")
        commitfiles = cursor.fetchall()
        DB.execute(cursor, "SELECT * from " + DB.commitkeyword._table + " WHERE commitid IN (" + allcommitids + ")")
        commitkeywords = cursor.fetchall()
    commits = []
    for row in commitrows:
        repo = Repo()
        # Repo columns are appended after the commit's own columns in the row.
        repo.loadFromValues(row[DB.commit._numColumns + 0], row[DB.commit._numColumns + 1],
                            row[DB.commit._numColumns + 2], row[DB.commit._numColumns + 3],
                            row[DB.commit._numColumns + 4], row[DB.commit._numColumns + 5])
        files = [f[DB.commitfile.file] for f in commitfiles
                 if f[DB.commitfile.commitid] == row[DB.commit.id]]
        keywords = [kw[DB.commitkeyword.keyword] for kw in commitkeywords
                    if kw[DB.commitkeyword.commitid] == row[DB.commit.id]]
        # Renamed from `c`, which shadowed the DB cursor in the original.
        commit = Commit()
        commit.loadFromDatabase(repo, row, files, keywords)
        commits.append(commit)
    return commits
def get_pham_colors():
    """Return a {pham_name: color} mapping read from the pham_color table."""
    db = DB()
    rows = db.query("SELECT `name`, `color` from `pham_color`")
    return {str(row[0]): row[1] for row in rows}
def saveinDB(data):
    """Insert one Resol controller sample (temperatures, pump, relays,
    flags, errors, runtime) into the configured MySQL table.

    `data` must contain the keys temp1..temp3, pump1, relais, flags,
    errors and r1time; the target table name is injected under the
    'table' key from config.
    """
    # Connect to MySQL database
    db = DB(config.db['host'], config.db['database'], config.db['user'], config.db['password'])
    data['table'] = config.db['table_resol']
    # NOTE(review): the SQL is built with %-string formatting. The %d
    # conversions constrain the values to ints, but the table name is
    # interpolated raw — safe only while it comes from trusted config.
    # Prefer a parameterized query if the DB wrapper supports one.
    db.execute("INSERT INTO `%(table)s`(`time`, `t1`, `t2`, `t3`, `p1`, `relais`, `flags`, `errors`, `rt1`) VALUES " "(NULL, '%(temp1)d', '%(temp2)d', '%(temp3)d', '%(pump1)d', '%(relais)d', '%(flags)d', '%(errors)d', '%(r1time)d')" % data)
def main(hostname, port):
    """Run `n` transfer measurements for each size in the module-level
    `sizes` list, recording elapsed time and retransmission count.
    """
    db = DB()
    for size in sizes:
        for i in range(0, n):
            # Python 2 print statement; message is Spanish
            # ("Size X, measurement i/n...").
            print '\nTamaño %d, medición %d/%d...' % (size, i + 1, n)
            t, retransmissions = transfer(hostname=hostname, port=port, size=size)
            db.register(DB.SIZE, t, retransmissions, size)
def logTerms(ip, keywords):
    """Record one search query: unix timestamp, client IP and the terms."""
    conn = DB.getConn()
    cursor = conn.cursor()
    insertSQL = ("INSERT INTO " + DB.searchqueries._table +
                 "(timestamp, ip, terms) " +
                 "VALUES(%s, INET_ATON(%s), %s) ")
    DB.execute(cursor, insertSQL, (int(time.time()), ip, keywords))
    conn.commit()
def insertDB(fileName):
    """Insert a collected-file record (source 'animu') into
    collecting_data; failures are logged, never raised.
    """
    try:
        db = DB("test", None)
        # WARNING: fileName is interpolated straight into the SQL string;
        # a quote in the name breaks the statement (injection risk). Kept
        # because db.insert() takes a raw SQL string — switch to a
        # parameterized API if the DB wrapper offers one.
        sqlString = "INSERT INTO collecting_data(`source`, `file_name`) VALUES('%s', '%s')" % ('animu', fileName)
        print(sqlString)
        db.insert(sqlString)
    except Exception as e:
        # Log the actual failure instead of a bare generic message.
        print("insert error: %s" % e)
    return
def update(_id, **params):
    """Update the Isp row with the given id.

    Only the 'name' field is updatable. Returns False when nothing was
    supplied, otherwise the ORM update() result after committing.
    """
    changes = {}
    name = params.get("name")
    if name:
        changes["name"] = name
    if not changes:
        return False
    result = DB.query(Isp).filter(Isp.id == _id).update(changes)
    DB.commit()
    return result
def upgrade(app):
    """ data migration to update the Tag.count fields

    For every existing tag, recompute count as the number of *public*
    posts that carry the tag, then commit once at the end.
    """
    db_session = DB(app.config['DATABASE']).get_session()
    existing_tags = db_session.query(Tag).all()
    for tag in existing_tags:
        # Join Post with Status, keep only posts whose status is 'public',
        # then count those containing this tag.
        tag.count = db_session.query(Post, Status).filter(and_(Post.status_id==Status.id, Status.value=='public', )).filter(Post.tags.contains(tag)).count()
    db_session.commit()
def upgrade(app):
    """Data migration: regenerate the cached HTML for every post's
    summary and content from its markup source, committing once.
    """
    session = DB(app.config['DATABASE']).get_session()
    for post in session.query(Post).all():
        post.summary_html = markup_to_html(post.format, post.summary)
        post.content_html = markup_to_html(post.format, post.content)
    session.commit()
def _init_complement(tpe, name, identifier, options):
    """Load and instantiate the complement class emma.<tpe>.<name>.<name>,
    giving it its own DB collection named "<tpe>_<name>_<identifier>".
    """
    import importlib  # stdlib; local import keeps the module header untouched
    db = DB()
    logging.debug(_("[core] load %(type)s %(name)s") % {'type': tpe, 'name': name})
    db_coll = db.collection("%s_%s_%s" % (tpe, name, identifier))
    # import_module returns the leaf module directly, unlike __import__,
    # which returns the top-level package and needed a getattr chain.
    complement = importlib.import_module("emma.%s.%s" % (tpe, name))
    clss = getattr(complement, name)
    return clss(identifier, options, db_coll)
def run(self):
    """Run one simulation for self.f (an .xml config that is not an
    '*out*' result file), then store the final state in the satgen DB.
    """
    if(os.path.splitext(self.f)[1]==".xml" and not re.search("out",self.f)):
        # NOTE(review): hard-coded per-user path — should come from
        # configuration (compare the sibling run() that uses self.db).
        db=DB("/home/poa32kc/Programs/satgen/satgen.db")
        object_id=db.get_sat_id_by_name(self.ex.get_name(self.f))
        args = shlex.split(self.command_line)
        CR = subprocess.call(args)  # return code currently unused
        from time import sleep
        # Give the simulator a moment to flush its output file.
        sleep(1)
        # NOTE(review): `ex` here is a module-level name, not self.ex —
        # confirm this is intentional.
        config_tuple = self.ex.convert_to_tuple(object_id, ex.get_root()+self.f+"_out_sim.xml")
        db.insert_final_state(config_tuple)
def create(**params):
    """Create a new Isp row from params; only 'name' is accepted.

    Returns False when no name was supplied, otherwise the new Isp
    instance after committing.
    """
    name = params.get("name")
    if not name:
        return False
    record = Isp(name=name)
    DB.add(record)
    DB.commit()
    return record
def run(self):
    """Run one simulation for self.f (an .xml config that is not an
    '*out*' result file) using the DB path configured on the instance.
    """
    if(os.path.splitext(self.f)[1] == ".xml" and not re.search("out", self.f)):
        db = DB(self.db)
        object_id = db.get_sat_id_by_name(self.ex.get_name(self.f))
        args = shlex.split(self.command_line)
        CR = subprocess.call(args)  # return code currently unused
        from time import sleep
        # Give the simulator a moment to flush its output file.
        sleep(1)
        # NOTE(review): `ex` is a module-level name, not self.ex, and
        # config_tuple is built but never persisted here — confirm whether
        # a db.insert_final_state(config_tuple) call is missing.
        config_tuple = self.ex.convert_to_tuple(
            object_id, ex.get_root() + self.f + "_out_sim.xml")
def main(hostname, port, initial_delay, initial_loss):
    """Sweep every (delay, loss) combination from the module-level
    `delays` and `losses` lists, running `n` transfers per combination.

    initial_delay/initial_loss let a previously interrupted sweep resume
    where it left off: earlier combinations are skipped.
    """
    db = DB()
    for loss in losses:
        if loss < initial_loss:
            continue
        for delay in delays:
            # Skip delays already measured for the resume-point loss value.
            if loss == initial_loss and delay < initial_delay:
                continue
            for i in range(0, n):
                # Python 2 print statement; Spanish progress message
                # ("Delay X, loss probability Y, measurement i/n...").
                print '\nDelay %f, probabilidad de pérdida %f, medición %d/%d...' \
                    % (delay, loss, i + 1, n)
                t, retransmissions = transfer(hostname=hostname, port=port, size=size, delay=delay, loss=loss)
                db.register(DB.DELAY_AND_LOSS_PROBABILITY, t, retransmissions, size, delay=delay, loss=loss)
def setup_transfomers():
    """Build a {column_name: lookup_dict} map of DB-backed converters.

    Each entry describes which table/key/value columns feed db.make_dict.
    (Function name keeps the original spelling for caller compatibility.)
    """
    db = DB()
    transformers = {
        "planning_region": {
            "table": "attrs_bra_pr",
            "key": "bra_id",
            "value": "pr_id"
        }
    }
    db_converters = {}
    for colname, settings in transformers.items():
        db_converters[colname] = db.make_dict(**settings)
    return db_converters
def __init__(self, read_only = True, auto_commit = False, timeout = 5, auto_connect = False, max_idle_time = 28800): '''Initialize the Mydb object.''' # Get the database parameters args = {'host':'127.0.0.1', 'user':'******','passwd':'xxxx','db':'goodcode','port':3306,'charset':'utf8'} # Set extra connection parameters args['connect_timeout'] = timeout args['auto_commit'] = auto_commit args['max_idle_time'] = max_idle_time args['auto_connect'] = auto_connect DB.__init__(self, **args)
def get_pham_number(phage_name, gene_number):
    """Look up the pham name for a gene of a phage.

    Raises StarteratorError when the gene/phage pair has no row; real
    database errors now propagate (the original bare `except:` masked
    them as "not found").
    """
    db = DB()
    results = db.query("SELECT pham.name \n\
        FROM gene JOIN pham ON gene.GeneID = pham.Gene \n\
        JOIN phage ON gene.PhageID = phage.PhageID \n\
        WHERE phage.Name LIKE %s AND gene.Name LIKE %s \n\
        ESCAPE '!'", (phage_name+"%", '%'+str(gene_number)))
    try:
        row = results[0]
    except IndexError:
        # Only "no rows" means not-found.
        raise StarteratorError("Gene %s of Phage %s not found in database!" % (gene_number, phage_name))
    return str(row[0])
def on_status(self, status):
    """Stream callback: persist each incoming status to the DB.

    Failures are printed and appended to exception.log; the stream is
    never interrupted by a save error.
    """
    print("Adding " + str(status.id))
    try:
        DB.save(status)
    except Exception as e:
        print("Exception: " + str(e))
        # `with` guarantees the log handle is closed even if write fails
        # (the original left the file open on a write error).
        with open("exception.log", "a") as f:
            f.write("Exception: " + str(e) + "\n")
    return
def init(self, gui, file): self.db = None # indicates 'not initialized' self.gui = gui self.printer = None # Open a database and initialize the widgets if file: db = DB(file) if db.isOpen(): db.init() self.open(db) else: self.slot_open(force=False)
class Productor( Thread ):
    """Background thread that polls the cs.hust.edu.cn RSS feed hourly,
    stores any news items newer than the latest stored id, and fetches
    each new item's full article body.
    """
    def __init__(self):
        self.db = DB()
        Thread.__init__(self)
    def link_id(self,link):
        # Extract the numeric id from the link's query string
        # (everything after '?xx=' — assumes a 3-char key prefix).
        return int(link.split('?')[1][3:])
    def run(self):
        while True:
            try:
                # Highest stored news id; DB assumed to return newest first.
                maxid = self.db.news_list()[0]['id']
            except:
                # Empty DB (or any lookup failure): start from id 1.
                maxid = 1
            print(maxid)
            client = HTTPClient()
            response = client.fetch('http://cs.hust.edu.cn/rss')
            result = response.body.decode("utf-8",errors='ignore')
            soup = BeautifulStoneSoup(result)
            items = soup.find_all('item')
            for item in items:
                title = item.title.text
                link = item.link.text
                desc = item.description.text
                linkid = self.link_id(link)
                if linkid > maxid:
                    result = self.db.add_news(linkid,title,desc,link)
                    if result:
                        result = self.get_article(link)
                else:
                    # Items are assumed newest-first; stop at the first
                    # already-seen id.
                    break
            time.sleep(3600)
    def get_article(self,link):
        # Fetch the article page and scrape title/date/body from the
        # 'neirong' content block, then store the full record.
        client = HTTPClient()
        response = client.fetch(link)
        result = response.body.decode('utf-8',errors='ignore')
        soup = BeautifulSoup(result)
        a = soup.find('div',class_='neirong')
        title = a.find('div',class_='show_title').text
        date = a.find('span',class_='ari10').text
        content = a.find('div',class_='show_cont').text
        linkid = self.link_id(link)
        self.db.add_new(linkid,title,content,date,link) #insert into the database
        return 1
def update(uid, **params):
    """Update mutable fields (passwd, real_name, status) of the Users
    row with id *uid*.

    Returns False when no updatable field was supplied, otherwise the
    ORM update() result after committing.
    """
    fields = {}
    for key in ("passwd", "real_name", "status"):
        value = params.get(key)
        if value:
            fields[key] = value
    if not fields:
        return False
    result = DB.query(Users).filter(Users.id == uid).update(fields)
    DB.commit()
    return result
def _delay(event, data, seconds, date, doc_id=None):
    """Fire *event* with *data* after *seconds*, persisting a scheduler
    document first so the job survives a restart.

    When doc_id is given, the document already exists and is only
    removed after the trigger fires.
    """
    core = DB().core()
    if not doc_id:
        record = {'element': 'sched',
                  'type': 'at',
                  'event': dumps(event.elements()),
                  'data': dumps(data),
                  'date': date}
        doc_id = core.insert(record)
    if seconds > 0:
        sleep(float(seconds))
    trigger(event, data)
    # The job ran: drop its persisted record.
    core.remove(doc_id)
def __init__(self, queue):
    # Worker thread that drains `queue` into the DB.
    Thread.__init__(self, name="Thread-DB")
    self.queue = queue
    self.db = DB()
    self.logit = logging.getLogger("logit")
    # Items staged for the next commit; flushed every `commit_size` items.
    self.to_commit = Queue()
    self.commit_size = 1
def run(self):
    """Regenerate one video and one audio .m3u playlist file for every
    playlist in the DB, matching each video's id against the files
    present in the playlists' video/ and audio/ directories.
    """
    logger.debug('Starting task GeneratePlaylists')
    self.app.task_cmd('task_start', message='Generating playlists ...')
    playlist_dir = appconf.playlists_dir()
    videodirlist = os.listdir(os.path.join(playlist_dir, 'video'))
    audiodirlist = os.listdir(os.path.join(playlist_dir, 'audio'))
    db = DB.new_connection()
    pls = db.playlist_list()
    for pl in pls:
        audiolist = []
        videolist = []
        videos = db.playlist_video_list(pl['id'])
        for video in videos:
            filename = self.get_filename(video['id'], videodirlist)
            if filename:
                videolist.append(os.path.join('video', filename))
            filename = self.get_filename(video['id'], audiodirlist)
            if filename:
                audiolist.append(os.path.join('audio', filename))
        videopl = '{0}_video.m3u'.format(pl['title'])
        audiopl = '{0}_audio.m3u'.format(pl['title'])
        vpath = os.path.join(playlist_dir, videopl)
        apath = os.path.join(playlist_dir, audiopl)
        # Renamed from `pl`, which shadowed the loop variable (it only
        # worked because pl['title'] was read before the rebinding).
        plfile = PlaylistFile(videolist)
        plfile.save(vpath)
        plfile = PlaylistFile(audiolist)
        plfile.save(apath)
    logger.debug('Done task GeneratePlaylists')
    self.app.task_cmd('task_stop', message='Done.')
def save(self):
    """Persist this Commit plus its file list and keyword list.

    The commit row is upserted; files and keywords are fully replaced
    (DELETE then one batched INSERT built as a UNION of SELECTs).
    Raises when called before the object has been initialized.
    """
    if not self.initialized:
        raise Exception("called save on unitialized Commit object")
    conn = DB.getConn()
    c = conn.cursor()
    # Upsert the commit row keyed on uniqueid.
    sql = "INSERT INTO " + DB.commit._table + """(repoid, date, message, uniqueid)
            VALUES(%s, %s, %s, %s)
            ON DUPLICATE KEY UPDATE uniqueid = VALUES(uniqueid)"""
    c.execute(sql, (self.repo.id, self.date, self.message, self.uniqueid))
    if self.commitid <= 0:
        # New row: remember the auto-generated id.
        self.commitid = conn.insert_id()
    if self.files:
        # Replace the commit's file rows wholesale.
        sql = "DELETE FROM " + DB.commitfile._table + " WHERE commitid = " + str(self.commitid)
        c.execute(sql)
        sql = "INSERT INTO " + DB.commitfile._table + "(commitid, file) "
        for f in self.files:
            # One "SELECT id, %s UNION " fragment per file; the commitid is
            # a known int, the filename stays a bound parameter.
            sql += "SELECT " + str(self.commitid) + ", %s UNION "
        sql = sql[:-6]  # strip the trailing " UNION "
        c.execute(sql, self.files)
    if self.dbkeywords:
        # Replace the commit's keyword rows the same way.
        sql = "DELETE FROM " + DB.commitkeyword._table + " WHERE commitid = " + str(self.commitid)
        c.execute(sql)
        sql = "INSERT INTO " + DB.commitkeyword._table + "(commitid, keyword) "
        for f in self.dbkeywords:
            sql += "SELECT " + str(self.commitid) + ", %s UNION "
        sql = sql[:-6]
        # dbkeywords may be a set; execute() needs a sequence.
        c.execute(sql, [x for x in self.dbkeywords])
    conn.commit()
def create(**params):
    """Create a Users row. uname and passwd are required; real_name and
    status are optional.

    Returns False when a required field is missing, otherwise the new
    Users instance after committing.
    """
    uname = params.get("uname")
    passwd = params.get("passwd")
    if not uname or not passwd:
        return False
    insert_data = {"uname": uname, "passwd": passwd}
    for optional in ("real_name", "status"):
        if params.get(optional):
            insert_data[optional] = params[optional]
    record = Users(**insert_data)
    DB.add(record)
    DB.commit()
    return record
def get_one(user_id):
    """Return the Users row with the given id, or None when absent.

    :param user_id: primary key of the user
    :return: Users instance or None
    """
    query = DB.query(Users).filter(Users.id == user_id)
    return query.first()
def get_by_recipe_id(recipe_id):
    """Return a list of Ingredient objects for *recipe_id*, or None when
    the recipe has no ingredient rows (matches original behavior).
    """
    with DB() as db:
        rows = db.execute(
            ''' SELECT * FROM ingredients WHERE recipe_id = ? ''',
            (recipe_id, )).fetchall()
    if not rows:
        return None
    return [Ingredient(*row) for row in rows]
def get_one(isp_id):
    """Return the Isp row with the given id.

    Accepts ints or int-convertible values; returns False for anything
    that cannot be coerced (including None, which the original's
    ValueError-only clause let raise TypeError) and for id 0.
    """
    if not isinstance(isp_id, int):
        try:
            isp_id = int(isp_id)
        except (TypeError, ValueError):
            return False
    if isp_id == 0:
        return False
    return DB.query(Isp).filter(Isp.id == isp_id).scalar()
def create(self):
    """Insert this user's fields into the users table and return self."""
    with DB() as db:
        db.execute(
            ''' INSERT INTO users (email, password, name, address, mobile) VALUES (?, ?, ?, ?, ?)''',
            (self.email, self.password, self.name, self.address, self.mobile))
    return self
def main(self):
    """Main loop of the Sheet API worker: forever create/rotate sheets
    and push pending data, pausing briefly between iterations.
    """
    # NOTE(review): logged at ERROR level though it's informational —
    # presumably to guarantee visibility; confirm intent.
    logging.error('Start Sheet API')
    db = DB()
    self.set_service()
    while True:
        self.create_sheet(db)
        self.change_sheet_id(db)
        self.add_data(db)
        # Throttle the loop to avoid hammering the sheets service.
        time.sleep(0.2)
def save(self):
    """Persist this post's current fields to its existing row and
    return self.
    """
    params = (self.name, self.description, self.article.id,
              self.file_path, self.user_id, self.id)
    with DB() as db:
        db.execute(
            '''UPDATE posts SET name = ?, description = ?, article_id = ?, file_path = ?, user_id =? WHERE id = ?''',
            params)
    return self
def get_one(business_id):
    """Return the Business row with the given id.

    Accepts ints or int-convertible values; returns False for anything
    that cannot be coerced (including None, which the original's
    ValueError-only clause let raise TypeError) and for id 0.
    """
    if not isinstance(business_id, int):
        try:
            business_id = int(business_id)
        except (TypeError, ValueError):
            return False
    if business_id == 0:
        return False
    return DB.query(Business).filter(Business.id == business_id).scalar()
def create(self):
    """Insert this post as a new row in the posts table and return self."""
    params = (self.name, self.description, self.article.id,
              self.file_path, self.user_id)
    with DB() as db:
        db.execute(
            '''INSERT INTO posts (name, description, article_id, file_path, user_id) VALUES (?, ?, ?, ?, ?)''',
            params)
    return self
def delete(self):
    """Delete every task row matching this task's title; returns self."""
    with DB() as db:
        db.execute(
            ''' DELETE FROM tasks WHERE title = ? ''',
            (self.title, ))
    return self
def find_by_id(id):
    """Look up a user row by primary key.

    Returns a User instance, or None when id is falsy or no row matches.
    """
    if not id:
        return None
    with DB() as db:
        row = db.execute('SELECT * FROM users WHERE id = ?', (id, )).fetchone()
    return User(*row) if row else None
def get_scraper_info_by_id(id):
    """Return the scraper's (scraper_id, name, enabled) record, or False
    when no scraper has that id.
    """
    info = DB.query_assoc(
        "SELECT scraper_id, name, enabled FROM scrapers WHERE scraper_id=?",
        [id], force_double_array=False)
    return info if info else False
def getNaturopathy(self):
    """Return the naturopathy record named by the `naturopathy_id` query
    parameter, enriched with its category services and per-accommodation
    titles, wrapped in the standard JSON response envelope.

    Responds 200/success when found, 201/failure when the id is missing
    or the record does not exist.
    """
    code = 200
    status = False
    message = ""
    responseData = {}  # NOTE(review): unused — candidate for removal
    # where = {}
    find_naturopathy_arr = {}
    if request.args.get(
            "naturopathy_id") and request.args.get("naturopathy_id") != "":
        naturopathy_id = ObjectId(request.args.get("naturopathy_id"))
        find_naturopathy = DB.find_one(tbl_v028_meditation_naturopathy,
                                       {"_id": naturopathy_id}, 'ALL')
        # All services belonging to the record's category.
        find_cat = DB.find_all_where(
            tbl_v007_services_master,
            {"category_id": ObjectId(find_naturopathy['category_id']['$oid'])},
            'ALL')
        accomodation_title = find_naturopathy['accomodations']
        if (find_naturopathy):
            find_naturopathy_arr['data'] = find_naturopathy
            # Resolve each accommodation id to its display title in place.
            for x in accomodation_title:
                title_id = x['accomodation_id']
                find_title = DB.find_one(tbl_v023_accomodation,
                                         {"_id": ObjectId(title_id)}, {
                                             "title": 1,
                                             '_id': 0
                                         })
                x["title"] = find_title["title"]
            find_naturopathy_arr['cat'] = find_cat
            code = 200
            status = True
            message = MSG_CONST.VENDOR_SERVICES_SUCCESS
        else:
            find_naturopathy_arr = []
            code = 201
            status = False
            message = MSG_CONST.VENDOR_OUTLET_NOT_FOUND
    else:
        # Missing/empty naturopathy_id query parameter.
        find_naturopathy_arr = []
        code = 201
        status = False
        message = "something went wrong"
    response = output_json(find_naturopathy_arr, message, status, code)
    logging.debug('vendor_services_list: {}'.format(response))
    return response
def getMap():
    """Build (once) and return the module-level keyword digraph.

    Nodes are keywords from the keyword table plus repo tag names; each
    node carries a 'type' attribute and a compiled word-boundary 'regex'.
    Edges link keywords to their parents, labeled with the child's type.
    """
    global map
    if not map:
        conn = DB.getConn()
        c = conn.cursor()
        c.execute("SELECT * FROM " + DB.keyword._table)
        rows = c.fetchall()
        map = pygraph.classes.digraph.digraph()
        for r in rows:
            keyword = r[DB.keyword.keyword].lower()
            keyword_regex = re.compile(
                '(?<=[^a-zA-Z])' + keyword + '(?![a-zA-Z])')  # k is not surrounded by alpha characters.
            parent = r[DB.keyword.parent]
            parent_regex = '' if not parent else re.compile('(?<=[^a-zA-Z])' + parent.lower() + '(?![a-zA-Z])')
            type = r[DB.keyword.type]
            if not map.has_node(keyword):
                map.add_node(keyword, [('type', type), ('regex', keyword_regex)])
            else:
                pass  # We take the first type defined, on the assumption that an APICALL will not also be something else
                # And that a MAPPING will not also be standard
            if parent and not map.has_node(parent):
                map.add_node(parent, [('type', KeywordType.STANDARD), ('regex', parent_regex)])
                # We define a parent tag as being standard. Really it should be max(existing, type) but that requires a logical definition of
                # increasing values of type - which we don't do
            if parent:
                map.add_edge((keyword, parent), label=type)
        # Second pass: every repo tag becomes a keyword, linked to a
        # synthetic 'project-<tag>' parent node.
        c.execute("SELECT tagname FROM " + DB.repo._table)
        rows = c.fetchall()
        for r in rows:
            keyword = r[0].lower()
            keyword_regex = re.compile('(?<=[^a-zA-Z])' + keyword + '(?![a-zA-Z])')
            parent = 'project-' + keyword
            parent_regex = re.compile('(?<=[^a-zA-Z])' + parent + '(?![a-zA-Z])')
            type = KeywordType.APICALL
            # apply only the project tag, not the non-project tag
            if not map.has_node(keyword):
                map.add_node(keyword, [('type', type), ('regex', keyword_regex)])
            if not map.has_node(parent):
                map.add_node(parent, [('type', KeywordType.STANDARD), ('regex', parent_regex)])
            map.add_edge((keyword, parent), label=type)
    return map
def billing(self, vendor_id):
    """Return the vendor's active subscription history, newest first,
    with each entry annotated with its membership plan title.
    """
    billing_data = DB.find_by_key(
        tbl_v016_subscription,
        {"$and": [{
            "vendor_id": ObjectId(vendor_id),
            "status": int(1)
        }]}, {}, [("index", pymongo.DESCENDING)])
    billing_arr = []
    for x in billing_data:
        membership = DB.find_one(tbl_v015_membership,
                                 {"_id": ObjectId(x['plan_id']['$oid'])}, {
                                     "title": 1,
                                     "_id": 0,
                                     "price": 1
                                 })
        # BUG FIX: the original condition `membership and "title" and
        # "price" in membership` only tested for "price" — the bare
        # "title" literal is always truthy. Check both keys explicitly.
        if membership and "title" in membership and "price" in membership:
            x["title"] = membership["title"].title()
            billing_arr.append(x)
    return output_json(billing_arr, True, 200)
def create_persons_table():
    """Idempotently create the `persons` table in persons_interests.sql3."""
    ddl = """CREATE TABLE IF NOT EXISTS persons (
    PersonId INTEGER PRIMARY KEY AUTOINCREMENT,
    FirstName VARCHAR(100),
    LastName VARCHAR(100)
    )"""
    with DB('persons_interests.sql3') as connection:
        connection.cursor().execute(ddl)
        connection.commit()
def sign_up():
    """Handle the sign-up form: derive a PBKDF2 key from the password
    with a fresh 16-byte salt, store the new user, then redirect to the
    sign-in page. Aborts 400 on a malformed request.
    """
    required = ['first_name', 'last_name', 'email', 'password']
    if not valid_request(request, required):
        return abort(400)
    form = request.form
    salt = secrets.token_bytes(16)
    master_key = hashlib.pbkdf2_hmac(
        'sha256', form['password'].encode('utf-8'), salt, 100000)
    DB.create_user(form['first_name'],
                   form['last_name'],
                   form['email'],
                   base64.b64encode(salt).decode('utf-8'),
                   base64.b64encode(master_key).decode('utf-8'))
    return redirect(f'{URL}/sign-in/sign-in.html', 303)
def find_by_username(username):
    """Return the User with *username*, or None when the username is
    falsy or no row matches.

    Guard added: fetchone() returns None for a missing user, and the
    original then raised TypeError on User(*None).
    """
    if not username:
        return None
    with DB() as db:
        row = db.execute(
            'SELECT * FROM users WHERE username = ?', (username,)
        ).fetchone()
    if row is None:
        return None
    return User(*row)
def create(self):
    """Insert this actor as a new row in the actor table; returns self."""
    params = (self.first_name, self.last_name, self.birth_year)
    with DB() as db:
        db.execute(''' INSERT INTO actor (first_name, last_name, birth_year) VALUES (?, ?, ?) ''',
                   params)
    return self
def find(id):
    """Load a Question by id.

    Returns a Question instance, or None when no row with that id exists.
    """
    with DB() as database:
        raw = database.execute('''SELECT * FROM questions WHERE id = ?''',
                               (id,)).fetchall()
        adapted = Adapter.adapt_query(raw)
        try:
            data = tuple(Adapter.adapt_question_rows(database, adapted)[0])
        except IndexError:
            # No matching row.
            return None
    return Question(*data)
def __init__(self, get_data, axial_images, coronal_images, sagittal_images, name, age, blood, note, time):
    """Worker thread that runs the MRI examination on the three image
    planes and records the result with the patient metadata.
    """
    super(ExamineThread, self).__init__()
    self.signals = ExamineSignals()
    self.get_data = get_data
    # Result flags, filled in by the examination run (0 = negative).
    self.examine_result_abnormal = 0
    self.examine_result_acl = 0
    self.examine_result_men = 0
    # Input image stacks, one per anatomical plane.
    self.axial_images = axial_images
    self.coronal_images = coronal_images
    self.sagittal_images = sagittal_images
    # Patient metadata.
    self.name = name
    self.age = age
    self.blood = blood
    self.note = note
    self.time = time
    self.db = DB()
def find_product(id):
    """Return the Product with *id*, or None when no such row exists.

    Guard added: fetchone() returns None for a missing id, and the
    original then raised TypeError on Product(*None).
    """
    with DB() as db:
        product = db.execute(
            ''' SELECT id, title, content, price FROM products WHERE id = ? ''',
            (id, )).fetchone()
    if product is None:
        return None
    return Product(*product)
def load(mail, password):
    """Return the User matching *mail* and *password*, or None.

    NOTE(review): the password is compared as a raw column value — if it
    is stored in plaintext, that should be migrated to a salted hash.
    """
    with DB() as db:
        row = db.execute(
            "SELECT * from Users WHERE mail = ? AND password = ? ",
            (mail, password)).fetchone()
    return User(*row) if row else None
def genOtp(length=6, collection="", where=None, otp_field="otp"):
    """Generate an OTP of `length` digits that is unique within
    `collection` (checked via DB.find_by_key with the given filter).

    Fixes two defects in the original:
    - the mutable default `where={}` was mutated on every call, leaking
      the previous OTP into later calls and into the caller's dict;
    - the recursive retry's result was not returned, so a collision made
      the function return None.
    """
    # get unique digits
    where = {} if where is None else dict(where)  # work on a copy
    OTP = random_n_digits(length)
    where[otp_field] = OTP
    result = DB.find_by_key(collection, where)
    if not result:
        return OTP
    # Collision: retry with a new code (propagate the custom field name).
    return genOtp(length, collection, where, otp_field)
async def on_raw_reaction_add(self, payload):
    """Moderation hook: accept or reject a submission when a moderator
    reacts to its message in the submission channel.

    Accept-emoji reactions mark the submission accepted, delete the
    message and DM the submitter; reject-emoji reactions do the same
    with a denial message. All other reactions are ignored.
    """
    try:
        user: discord.User = self.bot.get_user(payload.user_id)
        member: discord.Member = Settings.guild.get_member(payload.user_id)
    except Exception:
        return
    # Ignore the bot's own reactions and unknown users.
    if user == self.bot.user or not user:
        return
    # Only reactions inside the submission channel count.
    if payload.channel_id != Settings.submission_channel.id:
        return
    if str(payload.emoji) == Settings.accept_emoji:
        # Accept submission
        submission, user_id = DB.accept_submission(payload.message_id)
        if not submission:
            return
        try:
            msg = await Settings.submission_channel.fetch_message(payload.message_id)
            await msg.delete()
            other_user = self.bot.get_user(user_id)
            await other_user.send(embed=success_embed("You submission with ID `{}` was accepted!".format(submission.post_id)))
            return await Logger.accepted_submission(user, submission)
        except Exception as e:
            # Best-effort cleanup: the DB state is already updated, so
            # only log delivery/deletion failures.
            logging.error(
                "Failed to delete submission message and/or notify user.\n" + str(e))
            return
    elif str(payload.emoji) == Settings.reject_emoji:
        # Reject submission
        submission, user_id = DB.get_submission(payload.message_id)
        if not submission:
            return
        try:
            msg = await Settings.submission_channel.fetch_message(payload.message_id)
            await msg.delete()
            other_user = self.bot.get_user(user_id)
            await other_user.send(embed=error_embed("You submission with ID `{}` was denied!".format(submission.post_id)))
            return await Logger.rejected_submission(user, submission)
        except Exception as e:
            logging.error(
                "Failed to delete submission message and/or notify user.\n" + str(e))
            return
def buy(self, buyer_id):
    """Mark this advertisement as sold to *buyer_id* (clears the
    is_available flag); returns self.
    """
    params = (buyer_id, 0, self.id)
    with DB() as db:
        db.execute(
            'UPDATE advertisements SET buyer_id = ?, is_available = ? WHERE id = ?',
            params)
    return self
class ci_nodes():
    """Data-access wrapper for the `ci_nodes` table in the
    `paic_reports` database.
    """
    def __init__(self):
        self.record = DB('paic_reports', 'ci_nodes')
    def select_by(self, **where):
        """Select node rows matching the given column filters and wrap
        each in a ci_nodes_Records object.
        """
        return [
            ci_nodes_Records(row) for row in self.record.select(
                'id_nodes', 'name', 'status', 'max', 'comment', 'workspace',
                'date_created', 'date_updated').where(**where).submit()
        ]
def get_task_type_by_title(title):
    """Return the task_types row with the given title, or None when the
    title is falsy or no row matches (fetchone's None is passed through).
    """
    if not title:
        return None
    with DB() as db:
        return db.execute(
            ''' SELECT * FROM task_types WHERE title = ? ''',
            (title, )).fetchone()
def create(self):
    """Insert this movie as a new row in the movie table; returns self."""
    params = (self.name, self.genre, self.release_year, self.duration,
              self.description, self.rating, self.director_name)
    with DB() as db:
        db.execute(
            ''' INSERT INTO movie (name, genre, release_year, duration, description, rating, director_name) VALUES (?, ?, ?, ?, ?, ?, ?) ''',
            params)
    return self
def main():
    """Create the `orders` table in customers.sql3.

    IF NOT EXISTS makes the migration idempotent — the original raised
    an OperationalError when run a second time (and is now consistent
    with the persons-table migration in this codebase).
    """
    with DB('customers.sql3') as connection:
        sql_str = """CREATE TABLE IF NOT EXISTS orders (
        OrderId INTEGER PRIMARY KEY AUTOINCREMENT,
        OrderDate DATE,
        CustomerId INTEGER
        )"""
        cursor = connection.cursor()
        cursor.execute(sql_str)
        connection.commit()
def delete(self):
    """Delete every user row matching this user's email; returns self."""
    with DB() as db:
        db.execute(
            ''' DELETE FROM users WHERE email = ? ''',
            (self.email,))
    return self
def verify_password(self, password, email):
    """Check *password* against the stored sha256 hash for *email*.

    Returns False when the email has no row (the original indexed
    fetchone()[0] and raised TypeError for unknown emails).
    """
    with DB() as db:
        row = db.execute(
            ''' SELECT password FROM users WHERE email = ? ''',
            (email, )).fetchone()
    if row is None:
        return False
    # NOTE(review): unsalted sha256 is weak for password storage —
    # consider hashlib.pbkdf2_hmac/scrypt (requires a schema migration).
    return row[0] == hashlib.sha256(password.encode('utf-8')).hexdigest()
def get_task_type_title_by_id(task_type_id):
    """Return the title of the task type with the given id.

    Returns None for a falsy id or when no row matches (the original
    indexed fetchone()[0] and raised TypeError for unknown ids).
    """
    if not task_type_id:
        return None
    with DB() as db:
        row = db.execute(
            ''' SELECT title FROM task_types WHERE id = ? ''',
            (str(task_type_id), )).fetchone()
    return row[0] if row else None
def delete_sale(id):
    """Delete a sale: remove its files from disk, its comments, and the
    sale row itself, then redirect home.

    NOTE(review): no ownership check is visible here — any logged-in
    session appears able to delete any sale; confirm authorization is
    enforced upstream (e.g. by a decorator).
    """
    sale = Sale.find(id)
    # Remove the sale's entire file directory from disk first.
    shutil.rmtree(sale.file_path)
    with DB() as db:
        db.execute('DELETE FROM comments WHERE sale_id = ?', (sale.id, ))
    sale.delete()
    logging.info('%s with id: %s deleted sale %s',
                 User.find_by_id(session['USERNAME']), session['USERNAME'],
                 sale.id)
    return redirect('/')