def cart():
    """Render the current user's shopping cart page.

    Redirects to the login page when no user id is in the session.
    """
    me_id = session.get("USERID")
    if me_id is None:
        return redirect(url_for('login'))
    me = User.query.filter_by(id=me_id).first()
    sql_get_cart = '''SELECT I.imgurl AS img, I.name AS name, L.price AS price, C.amount AS amount, C.id AS id, W.street AS street, W.city AS city, W.zip AS zip, W.state AS state, I.id AS item_id FROM carts C, listings L, items I, warehouses W WHERE C.user_id = :id AND C.listing_id = L.id AND L.warehouse_id = W.id AND L.item_id = I.id'''
    # Bug fix: the original executed this query THREE times because a
    # ResultProxy is exhausted after one iteration.  Materialize once and
    # reuse the list for both template iterations and the count.
    cart_rows = engine.execute(sql_get_cart, id=me_id).fetchall()
    return render_template('cart.html', items=cart_rows, items2=cart_rows,
                           num=len(cart_rows), name=Name(), type=Type(),
                           categories=Cats(), me=me, regions=regions)
def db_delete_translation(uid):
    """Remove one translation from the database.

    :param uid: unique identifier associated with each translation
                (attributed and returned by the API)
    """
    stmt = translations.delete().where(translations.columns.uid == uid)
    engine.execute(stmt)
def setup():
    """Fixture: yield a test client plus DB session, then wipe every table."""
    test_client = TestClient(app)
    alembic.config.main(argv=["upgrade", "head"])
    db = Session(bind=engine)
    yield {"app": test_client, "db": db}
    # Delete child tables before parents so FK constraints are satisfied.
    for table in reversed(Base.metadata.sorted_tables):
        engine.execute(table.delete())
    db.close()
def document_upload():
    """Handle the document-upload form.

    On POST: parses tag and per-user access-exception lists (each mixes
    numeric ids of existing rows with free text for new ones), rejects
    duplicate uploads by content hash, stores the file, and records the
    Document plus an initial ChangeLog entry.  Always renders
    document_upload.html.
    """
    if request.method == 'POST':
        tags = request.form.getlist('tags')
        user_access_exceptions = request.form.getlist('user_exceptions')
        # Split each submitted list into existing ids vs. new text values.
        user_name_id_dict = {
            'name': [name for name in user_access_exceptions if not name.isdigit()],
            'id': [int(id) for id in user_access_exceptions if id.isdigit()]}
        tag_text_id_dict = {
            'text': [text for text in tags if not text.isdigit()],
            'id': [int(id) for id in tags if id.isdigit()]}
        saved_tags = []
        if tag_text_id_dict['id']:
            saved_tags = Tag.query.filter(Tag.id.in_(tag_text_id_dict['id'])).all()
        user_exceptions = []
        if user_name_id_dict['id']:
            user_exceptions = User.query.filter(User.id.in_(user_name_id_dict['id'])).all()
        if request.form.get('document_name') and request.form.get('tags') and 'file' in request.files:
            file = request.files['file']
            if not file:
                # Bug fix: the original flashed and then kept processing the
                # empty upload; bail back to the form instead.
                flash("No file included in upload", "error")
                return render_template('document_upload.html')
            doc_hash = get_document_hash(file)
            # getting the hash brings us to the end of the file - reset to the beginning
            file.stream.seek(0)
            doc_name = doc_previously_uploaded(doc_hash)
            if not doc_name:
                saved_file_name = file_upload(request.files['file'],
                                              app.config['DOCUMENT_UPLOAD_FOLDER'],
                                              app.config['ALLOWED_DOCUMENT_EXTENSIONS'])
                todays_date_time = datetime.datetime.today()
                # Classification is deferred (beanstalk queue); category is
                # 'pending' until a worker classifies the document.
                predicted_category = 'pending'
                document = Document(document_name=request.form['document_name'],
                                    file_name=saved_file_name,
                                    uploader=session['username'],
                                    upload_date=todays_date_time,
                                    version="1.0",
                                    last_edited_by=session['username'],
                                    archived=False,
                                    hash=doc_hash,
                                    file_extension=saved_file_name.split('.')[-1],
                                    access_level=request.form.getlist("access_level")[0],
                                    major_category=predicted_category)
                for tag in tag_text_id_dict['text']:
                    document.tags.append(Tag(tag))
                for exception in user_exceptions:
                    document.user_access_exceptions.append(exception)
                if saved_file_name:
                    db_session.add(document)
                    db_session.commit()
                    upload_event = ChangeLog(document.id, todays_date_time,
                                             "Initial upload of document",
                                             session['username'],
                                             document.document_name,
                                             document.file_name, "1.0")
                    db_session.add(upload_event)
                    db_session.commit()
                    flash("Document uploaded successfully", 'success')
                    # Bug fix: this linking previously ran even when the save
                    # failed, inserting a NULL document_id.  Link only after a
                    # successful commit.  (tag.id / document.id are ints from
                    # the ORM, so the format here is not injectable.)
                    for tag in saved_tags:
                        engine.execute("INSERT INTO DocumentTag (tag_id, document_id) VALUES ({0}, {1})".format(tag.id, document.id))
                else:
                    os.remove(app.config['DOCUMENT_UPLOAD_FOLDER'] + '/' + saved_file_name)
                    flash("Document upload failed. Please contact an administrator", 'error')
            else:
                flash("This document has been uploaded previously. Please search for the following document: {0}".format(doc_name), 'error')
        else:
            flash("Not all required fields were filled out. Please ensure all fields have been filled and try again")
    return render_template('document_upload.html')
def recreate_db():
    """Drop and recreate every table in the test database."""
    # Sessions held by other containers would block DROP TABLE, so kill
    # every other backend connected to 'test' first.
    engine.execute(
        """SELECT pg_terminate_backend(pg_stat_activity.pid) FROM pg_stat_activity WHERE pg_stat_activity.datname = 'test' AND pid <> pg_backend_pid();"""
    )
    Base.metadata.drop_all(bind=engine)
    Base.metadata.create_all(engine)
    time.sleep(0.1)
def update(query=None, permission=None):
    """Update one row of the Account_User association table.

    :param permission: new permission value to store
    :param query: dict carrying the 'account_id' and 'user_id' of the row
    :return:
    """
    stmt = (account_user_association_table.update()
            .where(account_user_association_table.c.account_id == query['account_id'])
            .where(account_user_association_table.c.user_id == query['user_id'])
            .values(permission=permission))
    engine.execute(stmt)
def reset_game():
    """Wipe every table and reseed the database with the initial game state."""
    for table in reversed(Base.metadata.sorted_tables):
        engine.execute(table.delete())
    # Database seeding.  Lemon and water are kept as locals because the
    # Limonade recipe references them below.
    lemon = ingredient('Citron', 0.1, False, True)
    water = ingredient('Eau', 0, False, True)
    # Tuples are (name, price, flag1, flag2) — presumably (alcoholic,
    # available/liquid); confirm against the ingredient model.
    stock = [
        ('Café', 0.5, False, False),
        ('Chocolat', 0.22, False, False),
        ('Rhum', 6, True, True),
        ('Sucre', 0.1, False, False),
        ('Alcool', 10, True, True),
        ("Jus d'orange", 0.5, False, True),
        ('Gin', 0.3, True, True),
        ('Vodka', 5, True, True),
        ('Jus de grenadine', 0.2, False, True),
        ('Sirop de menthe', 0.2, False, True),
        ('Jus de banane', 0.33, False, True),
        ('Glaçons', 0.02, False, True),
        ('Sel', 0.1, False, True),
        ('Colorant', 0.2, False, True),
        ('Infusion de pêche', 0.6, False, True),
        ('Feuilles de thé', 2, False, False),
        ('Feuilles de Kola', 5, False, True),
        ('Lait', 1, False, False),
    ]
    for args in stock:
        db_session.add(ingredient(*args))
    db_session.add(lemon)
    db_session.add(water)
    lemonade = recette("Limonade")
    lemonade.ingredients.append(lemon)
    lemonade.ingredients.append(water)
    db_session.add(lemonade)
    menu = carte(1000, 1000)
    day = journee(datetime.now())
    menu.journees.append(day)
    db_session.add(menu)
    db_session.add(day)
    db_session.commit()
    # Reset the in-memory game model.
    json_model.currentDay = 0
    json_model.availablesItems = []
    json_model.tomorrowActions = {}
    json_model.nbVentesPlayer = {}
    json_model.lastMessages = []
    return "Success", 200, {'Content-Type': 'application/text'}
def update_balance():
    """Add the 'added_balance' query parameter to the current user's balance.

    Redirects to the wallet page on success or invalid input, and to the
    login page when no user is in the session.
    """
    me_id = session.get("USERID")
    if me_id is None:
        return redirect(url_for('login'))
    to_add = request.args.get("added_balance")
    # Robustness fix: the raw query-string value used to be handed to the
    # DB untouched, so a missing or non-numeric value caused a 500.
    try:
        to_add = float(to_add)
    except (TypeError, ValueError):
        return redirect(url_for('wallet'))
    sql_update_balance = text('''UPDATE users SET balance = balance + :add WHERE id = :id''')
    engine.execute(sql_update_balance, add=to_add, id=me_id)
    return redirect(url_for('wallet'))
def cache_gene_locations(self, strain_config):
    """Cache each gene's extent (min start / max end over its features)
    into GeneLocation rows, working in chunks of
    self.gene_location_chunk_size until the query returns no rows.
    """
    print("Caching gene locations...")
    start = 0
    while(True):
        # NOTE(review): the query is pinned to strain 'Col_0' but the rows
        # are stored under strain_config["name"] — confirm this mismatch
        # is intentional (reference-strain coordinates reused per strain?).
        sql = (
            "SELECT "
            "	transcript.gene_id, "
            "	feature.chromosome_id, "
            "	feature.direction, "
            "	MIN(start) min_start, "
            "	MAX(end) max_end "
            "FROM feature, transcript "
            "WHERE feature.transcript_id = transcript.id "
            "AND feature.strain_id = 'Col_0' "
            "GROUP BY transcript.gene_id "
            # MySQL-style LIMIT offset, count — paginates the aggregation.
            "LIMIT "+str(start)+", "+str(self.gene_location_chunk_size))
        results = engine.execute(sql)
        if results.rowcount == 0:
            # No more genes — done.
            break
        for row in results:
            db_session.add(GeneLocation(
                gene_id=row["gene_id"],
                strain_id=strain_config["name"],
                chromosome_id=row["chromosome_id"],
                start=row["min_start"],
                end=row["max_end"],
                direction=row["direction"]
            ))
        start += self.gene_location_chunk_size
        # Commit once per chunk to bound transaction size.
        db_session.commit()
def updateBooking(self, booking_id, data):
    """Overwrite the booking row identified by *booking_id* with *data*.

    movieIds is stored as a comma-separated string.
    """
    movie_ids_csv = ','.join(map(str, data.get('movieIds')))
    stmt = (bookings.update()
            .where(bookings.c.id == booking_id)
            .values(userId=data.get('userId'),
                    movieIds=movie_ids_csv,
                    pubDate=data.get('pubDate')))
    engine.execute(stmt)
def createNewBooking(self, data):
    """Insert a new booking row built from *data*.

    movieIds is stored as a comma-separated string.
    """
    movie_ids_csv = ','.join(map(str, data.get('movieIds')))
    stmt = bookings.insert().values(userId=data.get('userId'),
                                    movieIds=movie_ids_csv,
                                    pubDate=data.get('pubDate'))
    engine.execute(stmt)
def browse():
    """List items, optionally filtered by the 'cat' query parameter.

    'ALL' (the default) lists everything; otherwise only items in the
    named category are shown.  Redirects to login when unauthenticated.
    """
    me_id = session.get("USERID")
    if me_id is None:
        return redirect(url_for('login'))
    cat = request.args.get('cat', 'ALL')
    # Fixes: removed a leftover debug print of the user id, and the
    # category query is no longer executed (and discarded) for 'ALL'.
    if cat == 'ALL':
        incats = InCat.query.all()
        items = Item.query.all()
    else:
        incats = InCat.query.join(Category).filter_by(name=cat)
        sql_item_in_cat = '''SELECT * FROM items I WHERE EXISTS (SELECT * FROM categories C, inCategory A WHERE I.id=A.item_id and C.id=A.cat_id and C.name = :1)'''
        items = engine.execute(sql_item_in_cat, cat)
    return render_template('browse.html', cats=Category.query.all(),
                           incats=incats, items=items,
                           me=User.query.filter_by(id=me_id).first(),
                           name=Name(), type=Type(), categories=Cats())
def process_csv():
    """GET: list the caller's files with a per-file completion status.
    POST: register a new file and fan one scrape task per keyword out to
    the async worker.  Auth is a JWT in the Authorization header.
    """
    token_result = validate_jwt(request.headers.get('Authorization'))
    if token_result == 401:
        return "Unauthorized", 401
    if request.method == 'GET':
        # 'status' is true once at least `keywords` data rows exist for the
        # file, i.e. every keyword scrape has landed.
        result = engine.execute(
            """ SELECT id, filename, keywords, created, (SELECT COUNT(*) >= f.keywords FROM data WHERE file_id=f.id) AS status FROM file f WHERE user_id=%s ORDER BY created DESC; """,
            [token_result["sub"]])
        converted_result = [list(row) for row in result]
        if converted_result:
            return jsonify(converted_result), 200
        return "Not Found", 404
    elif request.method == 'POST':
        if not find_user(token_result['sub']):
            return "Unauthorized", 401
        request_body = request.json
        # Record the file first so workers have a file_id to attach to.
        new_file = File(user_id=token_result["sub"],
                        filename=request_body["filename"],
                        keywords=len(request_body["keywords"]))
        db_session.add(new_file)
        db_session.commit()
        # One async scrape per keyword; results accumulate in `data`.
        for keyword in request_body["keywords"]:
            scrape_data_from_google.apply_async(args=[new_file.id, keyword])
        return "Upload Completed", 200
def seller():
    """Seller dashboard: own listings, all warehouses, optional item search.

    Non-sellers are redirected to the denied page; anonymous users to login.
    """
    me_id = session.get("USERID")
    if me_id is None:
        return redirect(url_for('login'))
    me = User.query.filter_by(id=me_id).first()
    if me.type != 'Seller':
        return redirect(url_for('denied'))
    results = None
    search_text = request.args.get("searchtext")
    if search_text:
        # Parameterized LIKE search over item names.
        sql_items_all = text('''SELECT * FROM items I WHERE I.name LIKE :term ''')
        results = engine.execute(sql_items_all, term='%' + search_text + '%')
    return render_template('seller.html',
                           seller=me,
                           listings=Listing.query.filter_by(seller_id=me_id).all(),
                           warehouses=Warehouse.query.all(),
                           cats=Category.query.all(),
                           items=Item.query.all(),
                           results=results,
                           name=Name(),
                           type=Type(),
                           categories=Cats())
def updateMovie(self, movie_id, data):
    """Overwrite the movie row identified by *movie_id* with *data*."""
    stmt = (movies.update()
            .where(movies.c.id == movie_id)
            .values(title=data.get('title'),
                    director=data.get('director'),
                    ranking=data.get('ranking')))
    engine.execute(stmt)
def get_user_nerd_by_user_id_and_nerd_id(user_id=None, nerd_id=None):
    """Return association rows matching both *user_id* and *nerd_id*."""
    stmt = (nerd_user_association_table.select()
            .where(nerd_user_association_table.c.user_id == user_id)
            .where(nerd_user_association_table.c.nerd_id == nerd_id))
    return engine.execute(stmt).fetchall()
def browse_tables():
    """Render ./menu.html with database table metadata.

    With a valid 'tname' query arg: that table's column descriptions and
    all of its rows.  Otherwise: a mapping of table name -> one column.
    """
    inspector = inspect(engine)
    tname = request.args.get('tname')
    # Security fix: 'tname' comes straight from the query string and was
    # previously concatenated into "SELECT * FROM ..." (SQL injection).
    # Whitelist it against the real table names before building any SQL;
    # unknown names fall through to the plain listing.
    if tname is not None and tname in inspector.get_table_names():
        column_details = ""
        for column in inspector.get_columns(tname):
            column_details += str(column) + "\n"
        results = engine.execute("SELECT * FROM " + tname).fetchall()
        return render_template('./menu.html',
                               table_names=inspector.get_table_names(),
                               column_details=column_details,
                               table_results=results)
    table_names = {}
    for table_name in inspector.get_table_names():
        for column in inspector.get_columns(table_name):
            # NOTE(review): only the LAST column survives per table —
            # behavior preserved from the original; looks like a latent bug.
            table_names.update({table_name: column})
    return render_template('./menu.html', table_names=table_names)
def get_inserted_transcript_ids():
    """Return the set of transcript IDs already present in the database."""
    rows = engine.execute("SELECT id FROM transcript")
    return {row['id'] for row in rows}
def fetch_transcript_ids(self):
    """Return every transcript ID as a list, ordered ascending."""
    rows = engine.execute("SELECT id FROM transcript ORDER BY id ASC")
    return [row["id"] for row in rows]
def createNewMovies(self, data):
    """Insert a new movie row built from *data*."""
    stmt = movies.insert().values(title=data.get('title'),
                                  director=data.get('director'),
                                  ranking=data.get('ranking'))
    engine.execute(stmt)
def getMovieById(self, movie_id):
    """Fetch one movie by id and return it serialized via MovieSchema."""
    proxy = engine.execute(movies.select().where(movies.c.id == movie_id))
    row = proxy.fetchone()
    serialized = MovieSchema().dump(row)
    proxy.close()
    return serialized.data
def get_table_data(db: Session, name: str):
    """Reflect table *name* from the database and return all of its rows."""
    reflected = Table(name, metadata, autoload=True, autoload_with=db)
    result = engine.execute(select([reflected]))
    return list(result)
def getByUserId(self, user_id):
    """Return every booking for *user_id*, serialized via BookingSchema."""
    proxy = engine.execute(bookings.select().where(bookings.c.userId == user_id))
    rows = proxy.fetchall()
    serialized = BookingSchema(many=True).dump(rows)
    proxy.close()
    return serialized.data
def getById(self, booking_id):
    """Fetch one booking by id, serialized via BookingSchema."""
    proxy = engine.execute(bookings.select().where(bookings.c.id == booking_id))
    row = proxy.fetchone()
    serialized = BookingSchema().dump(row)
    proxy.close()
    return serialized.data
def list(self):
    """Return every booking row, serialized via BookingSchema."""
    proxy = engine.execute(bookings.select())
    rows = proxy.fetchall()
    serialized = BookingSchema(many=True).dump(rows)
    proxy.close()
    return serialized.data
def getUserByName(self, user_id):
    """Fetch one user row, serialized via UserSchema.

    NOTE(review): despite the method name, the lookup is by *id*, not by
    name — confirm callers expect an id here.
    """
    proxy = engine.execute(users.select().where(users.c.id == user_id))
    row = proxy.fetchone()
    serialized = UserSchema().dump(row)
    proxy.close()
    return serialized.data
def get_inserted_transcript_ids():
    """Return the set of transcript IDs already in the database."""
    result = engine.execute("SELECT id FROM transcript ORDER BY id ASC")
    return {record["id"] for record in result}
def export(self):
    """Write every distinct structure transcript ID to the configured file,
    one per line, and report how many were written."""
    results = engine.execute("SELECT DISTINCT transcript_id FROM structure")
    count = 0
    with open(settings.structure_tids_filepath, "w") as out:
        for record in results:
            count += 1
            out.write(record["transcript_id"]+"\n")
    print(str(count)+" structure transcript IDs written to "+settings.structure_tids_filepath)
def get_inserted_transcript_ids():
    """Collect the IDs of all transcripts already inserted, as a set."""
    ids = set()
    for record in engine.execute("SELECT id FROM transcript ORDER BY id ASC"):
        ids.add(record["id"])
    return ids
def cache_gene_locations(self, strain_config):
    """Cache each gene's extent (min start / max end over its features)
    into GeneLocation rows.

    Processes the feature/transcript join in chunks of
    self.gene_location_chunk_size, committing once per chunk, until the
    query returns no rows.  (Improvement: removed the superseded
    commented-out string-concatenation version of the query.)
    """
    print("Caching gene locations...")
    start = 0
    while True:
        # NOTE(review): the query is pinned to strain 'Col_0' but rows are
        # stored under strain_config["name"] — confirm this is intentional.
        sql = """
            SELECT t.gene_id, f.chromosome_id, f.direction,
                   MIN(f.start) AS min_start, MAX(f.end) AS max_end
            FROM feature AS f
            INNER JOIN transcript AS t ON t.id = f.transcript_id
            WHERE f.strain_id = '{strain_id}'
            GROUP BY t.gene_id
            LIMIT {limit_}, {chunk}
        """
        sql = sql.format(strain_id='Col_0', limit_=start,
                         chunk=self.gene_location_chunk_size).replace('\n', ' ')
        results = engine.execute(sql)
        if results.rowcount == 0:
            break  # no more genes
        for row in results:
            db_session.add(
                GeneLocation(gene_id=row["gene_id"],
                             strain_id=strain_config["name"],
                             chromosome_id=row["chromosome_id"],
                             start=row["min_start"],
                             end=row["max_end"],
                             direction=row["direction"]))
        start += self.gene_location_chunk_size
        # Commit per chunk to bound transaction size.
        db_session.commit()
def get_related_txid_list(conn, addresses):
    """Return hex-encoded txids of up to 10 vout rows whose address is in
    *addresses*.

    Security fix: addresses were previously quoted by hand and pasted into
    the SQL text (injection risk); each one is now a bound parameter.
    Also guards the empty list, which used to produce invalid "IN ()" SQL.
    """
    if not addresses:
        return []
    placeholders = ", ".join(":a%d" % i for i in range(len(addresses)))
    bind_values = {"a%d" % i: addr for i, addr in enumerate(addresses)}
    txes = engine.execute(
        text("select txout_txhash from vout where address in (%s) limit 10" % placeholders),
        bind_values).fetchall()
    return [hexlify(tx[0]) for tx in txes]
def export(self):
    """Dump all distinct structure transcript IDs to the configured path,
    one ID per line, then print the count written."""
    result_set = engine.execute("SELECT DISTINCT transcript_id FROM structure")
    written = 0
    with open(settings.structure_tids_filepath, "w") as handle:
        for entry in result_set:
            written += 1
            handle.write(entry["transcript_id"] + "\n")
    print(str(written) + " structure transcript IDs written to " + settings.structure_tids_filepath)
def get_related_txid_list(conn, addresses):
    """Return hex-encoded txids of up to 10 vout rows matching *addresses*.

    Security fix: the address list was previously hand-quoted into the SQL
    string (injection risk); each address is now a bound parameter, and an
    empty list (formerly invalid "IN ()" SQL) returns [] directly.
    """
    if not addresses:
        return []
    placeholders = ", ".join(":a%d" % i for i in range(len(addresses)))
    bind_values = {"a%d" % i: addr for i, addr in enumerate(addresses)}
    txes = engine.execute(
        text("select txout_txhash from vout where address in (%s) limit 10" % placeholders),
        bind_values).fetchall()
    return [hexlify(tx[0]) for tx in txes]
def get_related_tx_list(conn, addresses):
    """Return db2t-converted Tx objects for up to 10 vout rows whose
    address is in *addresses*.

    Security fix: addresses were hand-quoted into the SQL text (injection
    risk); each is now a bound parameter.  The empty list, which used to
    build invalid "IN ()" SQL, short-circuits to [].
    """
    if not addresses:
        return []
    placeholders = ", ".join(":a%d" % i for i in range(len(addresses)))
    bind_values = {"a%d" % i: addr for i, addr in enumerate(addresses)}
    txes = engine.execute(
        text("select txout_tx_id from vout where address in (%s) limit 10" % placeholders),
        bind_values).fetchall()
    return [db2t_tx(conn, Tx.query.filter(Tx.id == tx[0]).limit(1).first()) for tx in txes]
def read_by_user_id(user_id=None):
    """Return every Account_User association row for the given *user_id*.

    :param user_id: id of the user whose association rows are wanted
    :return: list of matching rows
    """
    stmt = account_user_association_table.select().where(
        account_user_association_table.c.user_id == user_id)
    return engine.execute(stmt).fetchall()
def update_table(input: StockRequest, db: Session = Depends(get_db)):
    """Replace existing stocks data with the latest data from yfinance."""
    import ast  # local import: only needed here; module imports not visible
    engine.execute('DELETE FROM Stocks')  # delete all data from table
    # Security fix: input.ticker is client-supplied, and eval() would
    # execute arbitrary code.  literal_eval only parses Python literals
    # (e.g. a list of ticker strings) and raises on anything else.
    for tick in ast.literal_eval(input.ticker):
        stock = StockItem()
        stock.ticker = tick
        db.add(stock)
        # Commit per stock so stock.id is populated for the fetch below.
        db.commit()
        fetch_stock_data(stock.id)
    return {
        "code": "success",
        "message": "stock table updated"
    }
def update_tags(document, submitted_tags):
    """Replace *document*'s tags with *submitted_tags*.

    *submitted_tags* mixes numeric ids of existing tags with free text for
    new ones; text that matches an existing tag is relinked by id instead
    of duplicated.  Returns True on success, False on any exception.

    Fix: `except Exception, ex` is Python-2-only syntax; `as` works on
    both 2.6+ and 3.
    """
    try:
        document.tags = []
        db_session.commit()
        submitted = {'id': [int(tag_id) for tag_id in submitted_tags if tag_id.isdigit()],
                     'text': [tag_text for tag_text in submitted_tags if not tag_text.isdigit()]}
        if submitted['text']:
            for tag in Tag.query.filter(Tag.text.in_(submitted['text'])).all():
                # Existing tag with the same text: link by id, don't recreate.
                submitted['text'].remove(tag.text)
                submitted['id'].append(tag.id)
        for tag_text in submitted['text']:
            document.tags.append(Tag(tag_text))
        for tag_id in submitted['id']:
            # tag_id and document.id are ints, so the format is not injectable.
            engine.execute("INSERT INTO DocumentTag (tag_id, document_id) VALUES ({0}, {1})".format(tag_id, document.id))
        return True
    except Exception as ex:
        print('SQL EXCEPTION: {0}'.format(ex))
        return False
def run(self):
    """Rebuild the raw_reactivities table by compiling counts from raw
    lanes data, processing transcripts in chunks of self.chunk_size.
    """
    print("Compiling counts from raw lanes data...")
    engine.execute("TRUNCATE TABLE raw_reactivities")  # empty the table
    # Fetch every transcript ID up front so chunking is just list slicing.
    sql = "SELECT DISTINCT id FROM transcript ORDER BY id"
    results = engine.execute(sql)
    tids = []
    for row in results:
        tids.append(row["id"])
    n_tids = len(tids)
    print(str(n_tids) + " transcript IDs fetched")
    print("Inserting...")
    chunk_start = 0
    while True:  # loop through chunks
        # gather transcript IDs for this chunk (stop at the list's end)
        tids_chunk = []
        for i in range(chunk_start, chunk_start + self.chunk_size):
            if i >= n_tids:
                break
            tids_chunk.append(tids[i])
        # grab all the raw lanes for the transcript IDs in the chunk
        self.fetch_raw_replicate_counts(tids_chunk)
        # progress: one dot per chunk, the running count every 1000
        print(".", end="", flush=True)
        chunk_start += self.chunk_size
        if chunk_start % 1000 == 0:
            print(chunk_start)
        if chunk_start >= n_tids:
            break
    print(str(n_tids) + " transcripts processed")
def auth():
    """Authenticate a user from request values.

    Returns {'status': 'ok', 'username': ...} on success, or a 404 JSON
    payload when the credentials do not match.
    """
    # Lowercase passwords so password check is case insensitive
    # (preserved behavior; note this reduces password entropy).
    username = request.values["username"].lower()
    password = request.values["password"].lower()
    # Security fix: the credentials were %-formatted straight into the SQL
    # text (classic injection, and the redacted literal even had a
    # format-arity bug).  Bind them as DBAPI parameters instead.
    # NOTE(review): assumes a pyformat/format driver (psycopg2, MySQLdb);
    # for sqlite use text() with :name binds.
    rows = engine.execute(
        "select id from users where name = %s and password = %s",
        (username, password)).first()
    if rows:
        # Returning the raw username is weak: better to return a hashed or
        # time-based token so other services need this API (and the
        # password) to act on the user's behalf.
        return jsonify(status='ok', username=username)
    else:
        response = jsonify({'code': 404, 'message': 'Username or password is incorrect'})
        response.status_code = 404
        return response
def cache_gene_locations(self, strain_config):
    """Cache per-gene feature extents (min start, max end) as GeneLocation
    rows, paginating the aggregate query in chunks of
    self.gene_location_chunk_size until it yields nothing.
    """
    print("Caching gene locations...")
    offset = 0
    while True:
        chunked_sql = (
            "SELECT "
            " transcript.gene_id, "
            " feature.chromosome_id, "
            " feature.direction, "
            " MIN(start) min_start, "
            " MAX(end) max_end "
            "FROM feature, transcript "
            "WHERE feature.transcript_id = transcript.id "
            "AND feature.strain_id = 'Col_0' "
            "GROUP BY transcript.gene_id "
            "LIMIT " + str(offset) + ", " + str(self.gene_location_chunk_size)
        )
        chunk = engine.execute(chunked_sql)
        if chunk.rowcount == 0:
            break  # ran out of genes
        for record in chunk:
            location = GeneLocation(
                gene_id=record["gene_id"],
                strain_id=strain_config["name"],
                chromosome_id=record["chromosome_id"],
                start=record["min_start"],
                end=record["max_end"],
                direction=record["direction"],
            )
            db_session.add(location)
        offset += self.gene_location_chunk_size
        # One commit per chunk keeps transactions bounded.
        db_session.commit()
def read(user_id, account_id):
    """Return Account_User rows matching both *user_id* and *account_id*."""
    stmt = (account_user_association_table.select()
            .where(account_user_association_table.c.user_id == user_id)
            .where(account_user_association_table.c.account_id == account_id))
    return engine.execute(stmt).fetchall()