def brandsModelsCars():
    """Serve joined brand/model/car rows as JSON.

    Query params:
        search: case-insensitive substring matched against brand, model and
                car names.
        sort:   "power" (descending), "weight" (ascending) or
                "ratio" (weight/power, ascending).
    When neither param is present, a random sample of car ids is fetched.
    """
    query = """ select brand.id, brand.name, model.id, model.name, car.id, car.name, car.power, car.weight, car.year, car.image_url from car inner join model on car.model_id = model.id inner join brand on brand.id = model.brand_id """
    args = []
    search = request.args.get('search')
    order = request.args.get('sort')
    if search is None and order is None:
        # No query params: pick LIMIT_NUMBER random car ids.
        countCursor = get_db().cursor()
        countCursor.execute("select count(*) from car")
        count = countCursor.fetchone()[0]
        # NOTE(review): this assumes car ids are contiguous 1..count —
        # confirm against the schema. randrange's upper bound is exclusive,
        # so use count + 1 (the original could never select the highest id
        # and raised ValueError when count <= 1). Guard the empty table.
        if count > 0:
            args = [randrange(1, count + 1) for _ in range(LIMIT_NUMBER)]
            query = query + "where " + " or ".join("car.id = ?" for _ in args) + " "
    else:
        if search is not None:
            search = "%" + search.upper() + "%"
            query = query + " where upper(brand.name) like ? or upper(model.name) like ? or upper(car.name) like ?"
            args = [search] * 3
        # Whitelist of sortable keys: only these three fixed strings are
        # ever concatenated into the SQL, so no user text reaches the query.
        if order == "power" or order == "ratio" or order == "weight":
            if order == "ratio":
                order = "weight / car.power asc"
            elif order == "power":
                order = "power desc"
            elif order == "weight":
                order = "weight asc"
            query = query + " order by car." + order
    query = query + LIMIT_SQL_CLAUSE
    cursor = get_db().cursor()
    cursor.execute(query, args)
    rows = cursor.fetchall()
    result = buildBrandModelCars(rows)
    return jsonify(result)
def delete_orders(id):
    """Delete the client_order row with the given id and report the count."""
    connection = get_db()
    cursor = connection.cursor()
    cursor.execute("DELETE FROM client_order WHERE id = ?", [id])
    deleted = cursor.rowcount
    connection.commit()
    return "%s ROW(S) DELETED" % deleted
def menu():
    """Return every menu row as a JSON list of {id, title, price} objects."""
    cur = get_db().cursor()
    cur.execute("SELECT * FROM menu")
    menus = [
        {"id": row[0], "title": row[1], "price": row[2]}
        for row in cur.fetchall()
    ]
    return jsonify(menus)
def main():
    """Reload the NASDAQ collection from migrations/companylist.csv.

    Drops the existing Mongo collection, strips unused CSV columns, renames
    the remaining ones and bulk-inserts every row as a document.
    """
    db = get_db()
    nasdaq = db.NASDAQ
    nasdaq.drop()
    df = pandas.read_csv('migrations/companylist.csv')
    # Use the explicit `columns=` keyword: the positional axis argument to
    # DataFrame.drop was deprecated in pandas 1.x and removed in 2.0.
    df = df.drop(columns=['LastSale', 'ADR TSO', 'IPOyear', 'Unnamed: 9'])
    df.columns = [
        'symbol', 'name', 'market_cap', 'sector', 'industry', 'summary_quote'
    ]
    # to_dict('index') keys rows by index; only the row dicts are inserted.
    documents = list(df.to_dict('index').values())
    nasdaq.insert_many(documents)
def preference():
    """POST handler for group restaurant preferences.

    Two modes, keyed on the JSON payload:
      * 'name' present  -> register a user (creating the group's swipes row
        the first time the password is seen) and echo a swipes row.
      * otherwise       -> check whether any restaurant has been swiped by
        every member of the group; return the match, or else the next
        restaurant (optionally filtered by the 'pref' zone) to rate.
    """
    if request.method == 'POST':
        db = init_db.get_db()
        password = request.json['password']
        if 'name' in request.json:
            # Parameterized queries throughout: the original interpolated
            # the raw password into the SQL text (injection risk).
            cur = db.execute('select * from users where GroupPassword = ?',
                             [password])
            if len(cur.fetchall()) == 0:
                # First member of this group: create its swipes row.
                db.execute('insert into swipes (GroupPassword) values (?)',
                           [password])
                db.commit()
            db.execute(
                'insert into users (UserName, GroupPassword, LocationPreference) values (?, ?, ?)',
                [request.json['name'], password, request.json['location']])
            db.commit()
            cur2 = db.execute('select * from swipes')
            return json.dumps(dict(cur2.fetchone()))
        else:
            cur = db.execute('select * from users where GroupPassword = ?',
                             [password])
            # Count the group's members. The original took len() of a single
            # row (i.e. its column count); fetchall() counts rows instead.
            numUsers = len(cur.fetchall())
            cur = db.execute('select * from swipes where GroupPassword = ?',
                             [password])
            numSwipes = dict(cur.fetchone())
            matched = ""
            # dict views are not indexable in Python 3; materialize to lists
            # to recover the column whose swipe count equals the user count.
            swipe_counts = list(numSwipes.values())
            if numUsers in swipe_counts:
                matched = str(
                    list(numSwipes.keys())[swipe_counts.index(numUsers)])
            if request.json['pref'] == 'noPref':
                if matched != "":
                    return matched
                return restaurants.iloc[request.json['count']].to_json(
                    orient='index')
            else:
                if matched != "":
                    return matched
                preferred_location = restaurants.loc[restaurants['ZONE'] ==
                                                     request.json['pref']]
                preferred_location = preferred_location.sort_values(
                    by='HEURISTIC', ascending=False)
                return preferred_location.iloc[request.json['count']].to_json(
                    orient='index')
def get_orders():
    """Return every client_order row as a JSON list of order objects."""
    cur = get_db().cursor()
    cur.execute(
        "SELECT id, client, descr, price, datetime(date) date FROM client_order"
    )
    orders = [
        {
            "id": row[0],
            "client": row[1],
            "description": row[2],
            "price": row[3],
            "date": row[4],
        }
        for row in cur.fetchall()
    ]
    return jsonify(orders)
def food():
    """Return every food row as a JSON list of food objects."""
    cur = get_db().cursor()
    cur.execute("SELECT * FROM food")
    foods = [
        {
            "id": row[0],
            "menuId": row[1],
            "title": row[2],
            "description": row[3],
            "category": row[4],
            "price": row[5],
            "photo": row[6],
        }
        for row in cur.fetchall()
    ]
    return jsonify(foods)
def post_orders():
    """Create a client_order row from the posted JSON payload.

    Validates required fields, the ISO date format and id uniqueness,
    responding 400 with an explanation when any check fails.
    """
    validation_errors = validate_payload(
        request, ['id', 'date', 'description', 'price', 'client'])
    if validation_errors:
        return "\n".join(validation_errors), 400
    order = request.get_json(force=True, silent=True)
    if not datetime_valid(order["date"]):
        return "You sent an unexpected date format. Expected format is ISO string like '2016-12-13T21:20:37.593194+00:00Z'", 400
    con = get_db()
    cur = con.cursor()
    cur.execute("SELECT * FROM client_order WHERE id = ?", [order["id"]])
    if cur.fetchall():
        # Reject duplicate primary keys before attempting the insert.
        return "Impossible to create the order with the id '%s': this id already exists" % order[
            "id"], 400
    cur.execute(
        "INSERT INTO client_order(id, client, descr, price, date) VALUES (?, ?, ?, ?, ?)",
        (order["id"], order["client"], order["description"], order["price"],
         order["date"]))
    inserted = cur.rowcount
    con.commit()
    return "%s ROW(S) INSERTED" % inserted
from sklearn.metrics import mean_squared_error import urllib import urllib2 from pandas_datareader import DataReader import datetime import time import csv import re import pymongo from pandas.tseries.offsets import BDay from init_db import get_db db = get_db() stocks = [] start = datetime.datetime(2010, 1, 1) end = datetime.datetime.today() + datetime.timedelta(days=1) discriminator = None if (datetime.datetime.utcnow().weekday() < 4 or datetime.datetime.utcnow().weekday() == 6 ) and datetime.datetime.utcnow() < datetime.datetime.fromordinal( datetime.datetime.utcnow().date().toordinal()) + datetime.timedelta( hours=22): discriminator = datetime.datetime.utcnow().date() else:
def cars():
    """Return a limited list of raw car rows as JSON."""
    db_cursor = get_db().cursor()
    db_cursor.execute("select * from car" + LIMIT_SQL_CLAUSE)
    return jsonify(db_cursor.fetchall())
def models():
    """Return all raw model rows as JSON."""
    db_cursor = get_db().cursor()
    db_cursor.execute("select * from model")
    return jsonify(db_cursor.fetchall())
def brands():
    """Return all raw brand rows as JSON."""
    db_cursor = get_db().cursor()
    db_cursor.execute("select * from brand")
    return jsonify(db_cursor.fetchall())