def reset_db():
    """Delete every record from the database and redirect to the data page.

    The redirect carries a URL-encoded confirmation message in the query
    string and uses an explicit 302 status.
    """
    get_db().execute(models.DELETE_ALL_RECORDS)
    get_db().commit()
    notice = "All data deleted!"
    target = "/page_data?msg={0}".format(quote(notice))
    return redirect(target, code=302)
def setUp(self):
    """Push a fresh Flask application context and open the database."""
    # Keep the context on self so tearDown can pop it later.
    self.ctx = app.app.app_context()
    self.ctx.push()
    app.get_db()
def _get_users():
    """Build cumulative user statistics sampled every 10 days since launch.

    Returns a dict with:
      'labels'     — sample dates formatted 'dd-mm'
      'registered' — users registered on/before each sample date
      'confirmed'  — of those, users with a confirmed account
      'fra'        — of those, users registered to the FRA event
    """
    start = datetime.datetime(2017, 1, 9)  # pre-launch date
    days = (datetime.datetime.today() - start).days
    # One sample every 10 days, from the day after launch through today (+1).
    dates = [start + datetime.timedelta(inc) for inc in range(1, days + 2, 10)]

    users = get_db().users

    def _cumulative(extra_filter):
        # Count users registered on or before each sample date, with any
        # extra criteria merged in. count_documents() replaces the
        # deprecated Cursor.count() API.
        return [
            users.count_documents(dict(extra_filter, registered_on={'$lte': d}))
            for d in dates
        ]

    return {
        'labels': [d.strftime('%d-%m') for d in dates],
        'registered': _cumulative({}),
        'confirmed': _cumulative({'confirmed': True}),
        'fra': _cumulative({'events.fra.registered': True}),
    }
def resume(oid=None):
    """Upload (POST) or remove (DELETE) the current user's PDF resume in S3.

    POST expects a 'resume' file field; the user's profile must already have
    'first_name' and 'name'. DELETE expects the S3 object id in form['oid'].
    Returns 'success' on either path; aborts with 500 on an invalid upload.
    NOTE(review): no explicit return for other HTTP methods — confirm the
    route only allows POST/DELETE.
    """
    # Allowed files
    def allowed_file(filename):
        # Only a bare '.pdf' extension is accepted.
        return '.' in filename and filename.rsplit('.', 1)[1] in ['pdf']

    if request.method == 'POST':
        file = request.files.get('resume')
        profile = current_user.data.get('profile')
        is_valid_profile = profile.get('first_name') and profile.get('name')
        if file and allowed_file(file.filename) and is_valid_profile:
            filename = secure_filename(file.filename)
            oid = ObjectId()  # fresh id; the oid argument is ignored on upload
            s3_client.put_object(Bucket=os.environ.get('BUCKET_NAME'),
                                 Metadata={'filename': filename},
                                 ContentType=file.content_type,
                                 Body=file,
                                 Key=f'resumes/{oid}.pdf')
            # Remember the S3 key on the user document.
            get_db().users.update_one(
                {'id': current_user.id},
                {'$set': {
                    'profile.resume_id': str(oid)
                }})
            return 'success'
        else:
            abort(500)
    if request.method == 'DELETE':
        oid = request.form['oid']
        s3_client.delete_object(Bucket=os.environ.get('BUCKET_NAME'),
                                Key=f'resumes/{oid}.pdf')
        get_db().users.update_one({'id': current_user.id},
                                  {'$unset': {
                                      'profile.resume_id': 1
                                  }})
        return 'success'
def nogood_test_list_page(client):
    """Right now this fails miserably, but when it was 'working' it was
    operating on the live database instead of the temporary one like the
    docs implied it would."""
    # Known-broken test kept for reference: it exercises the permission
    # checks on the delete routes plus a login round-trip.
    with app.app.app_context():
        with client as c:
            from flask import session, g
            from users.models import User, Role
            import app
            print(app.app.config['DATABASE_PATH'])
            app.get_db(app.app.config['DATABASE_PATH'])
            # access without login
            result = c.get('/user/delete/3/', follow_redirects=True)
            assert result.status_code == 200
            assert b'Permission Denied' in result.data
            rec = User(app.g.db).get('John')
            print(rec)
            # Login as user role
            result = c.post('/login/',
                            data={
                                'userNameOrEmail': 'John',
                                'password': '******'
                            },
                            follow_redirects=True)
            assert result.status == '200 OK'
            assert b'Invalid User Name or Password' not in result.data
            assert session['user'] == 'John'
            # attempt to delete a record
            result = c.get('/role/delete/3/', follow_redirects=True)
            assert result.status_code == 200
            assert b'Permission Denied' in result.data
def insert_user(detail_dict):
    """Insert a row into `user_details` from a {column: value} dict.

    The original concatenated the values straight into the SQL statement
    (SQL-injection prone and quote-fragile); values are now bound with `?`
    placeholders. Column names cannot be parameterized, so they are still
    backtick-quoted from the dict keys — callers must not pass untrusted
    keys.
    """
    keys = list(detail_dict.keys())
    columns = ','.join("`" + str(key) + "`" for key in keys)
    placeholders = ', '.join('?' for _ in keys)
    query_string = ('INSERT INTO `user_details` (' + columns + ') '
                    'VALUES (' + placeholders + ');')
    with app.app_context():
        # insert the user
        db = get_db()
        db.execute(query_string, [detail_dict[k] for k in keys])
        # check for debugging
        print("Newly inserted user")
        new_user = query_db('select * from user_details where username = ?',
                            [detail_dict['username']],
                            one=True)
        if new_user is None:
            print("something is wrong")
        else:
            print("Yay new user!")
            print(new_user)
def fix_dates():
    """Convert string 'registered_on'/'confirmed_on' values to datetimes.

    Some user documents stored RFC-1123-style date strings; parse those and
    write real datetime objects back so date queries work.

    Fixes over the original: isinstance() instead of `type(x) == str`, and
    .get() so documents missing 'confirmed_on' no longer raise KeyError.
    """
    from datetime import datetime

    fmt = '%a, %d %b %Y %H:%M:%S %Z'
    users = get_db().users
    cur = users.find({}, {'_id': 1, 'registered_on': 1, 'confirmed_on': 1})
    for doc in cur:
        for field in ('registered_on', 'confirmed_on'):
            value = doc.get(field)
            if isinstance(value, str):
                print(f'original: {value}')
                new_d = datetime.strptime(value, fmt)
                users.update_one({'_id': doc['_id']},
                                 {'$set': {field: new_d}})
                print(f'fixed: {new_d}')
def queryrate(currency, _date):
    """Fetch the USD->currency forex rate for a date and cache it in Mongo.

    Returns the rate on success, False on a request/parse failure, or None
    (via log.error) when the API answers with a non-200 status.
    """
    base = 'USD'
    log.debug("querying forex '%s' rate on '%s'", currency, _date.date())
    try:
        uri = "https://api.fixer.io/%s?base=%s&symbols=%s" % (
            _date.date(), base, currency)
        response = requests.get(uri)
        data = response.json()
    except Exception:
        log.exception("error querying forex rates")
        return False
    if response.status_code != 200:
        # log.error returns None, so this path yields None to the caller.
        return log.error("forex status=%s, text=%s", response.status_code,
                         response.text)
    # NOTE(review): the rate is stored under a hard-coded 'CAD' key even
    # though `currency` is a parameter — confirm readers expect this.
    get_db().forex.insert_one({
        "date": _date,
        "USD": 1,
        "CAD": data["rates"][currency]
    })
    # Fixed the unterminated quote in the original log format string.
    log.debug("forex rate='%s'", data["rates"][currency])
    return data["rates"][currency]
def validate_section():
    """Mark the posted page/section as validated for the current company.

    Returns 'success' when the flag was newly set, 'error' when the page
    was already validated.
    """
    page = request.form.get('page')
    # Guard clause: already validated.
    if current_user.data.get(page):
        return "error"
    get_db().companies.update_one({'id': current_user.id},
                                  {'$set': {page: True}})
    return "success"
def query_api_mkt():
    """Update 5T market index data from coinmarketcap.com REST API.

    Maps the API response onto our document keys via the coinmarketcap
    config table and upserts one document per date into cmc_mkt.
    """
    t1 = Timer()
    try:
        r = requests.get("https://api.coinmarketcap.com/v1/global")
        data = json.loads(r.text)
    except Exception as e:
        # The original logged r.status_code here, which raises NameError
        # when requests.get() itself failed; log the exception instead.
        return log.error("API error %s", e)
    if r.status_code != 200:
        return log.error("API error %s", r.status_code)
    # Convert/rename each configured field: {'from', 'to', 'type'}.
    store = {}
    for m in coinmarketcap['api']['markets']:
        store[m["to"]] = m["type"](data[m["from"]])
    get_db().cmc_mkt.replace_one({'date': store['date']}, store, upsert=True)
    log.info("Coinmktcap markets updated. [{}ms]".format(t1))
def api_questions():
    """Return up to 30 open questions as JSON, filtered by session prefs.

    Query args: 'quality' (minimum quality score, default 50) and one key
    per category whose value 1 marks it as selected; selections persist in
    the session.

    Fix: category names were interpolated directly into the SQL IN (...)
    clause (SQL-injection prone); they are now bound as parameters. Also
    removed a duplicated connection/cursor and an unused category listing.
    """
    con = get_db()
    cur = con.cursor()

    # Persist the caller's filter choices.
    session['cats'] = dict()
    for k, v in request.args.items():
        if k == 'quality':
            session['quality'] = int(v)
        else:
            session['cats'][k] = int(v)
    if 'quality' not in session:
        session['quality'] = 50

    chosen = [cat for cat in session['cats'] if session['cats'][cat] == 1]
    if not chosen:
        query = """
        SELECT Q.id, Q.body_html, Q.creation_date, Q.last_activity_date,
            Q.link, Q.title, Q.author_id, Q.quality_score,
            '' AS chosen_categories,
            GROUP_CONCAT(DISTINCT C.name ORDER BY C.name) AS categories
        FROM questions AS Q
        JOIN question_tags AS QT ON QT.question_id = Q.id
        JOIN tag_categories AS TC ON TC.tag_id = QT.tag_id
        JOIN categories AS C ON C.id = TC.category_id
        WHERE Q.accepted_answer_id IS NULL
            AND Q.quality_score >= %s
        GROUP BY Q.id
        ORDER BY Q.last_activity_date DESC
        LIMIT 30;"""
        params = (session['quality'],)
    else:
        # One %s placeholder per chosen category for the IN (...) list.
        placeholders = ','.join(['%s'] * len(chosen))
        query = """
        SELECT Q.id, Q.body_html, Q.creation_date, Q.last_activity_date,
            Q.link, Q.title, Q.quality_score, Q.author_id,
            GROUP_CONCAT(DISTINCT C.name ORDER BY C.name) AS categories,
            GROUP_CONCAT(DISTINCT IF(C.name in ({}), C.name, NULL)
                         ORDER BY C.name) AS chosen_categories
        FROM categories AS C
        JOIN tag_categories AS TC ON C.id=TC.category_id
        JOIN question_tags AS QT ON QT.tag_id=TC.tag_id
        JOIN questions AS Q ON Q.id=QT.question_id
        WHERE Q.quality_score >= %s
            AND Q.accepted_answer_id IS NULL
        GROUP BY Q.id
        HAVING CHAR_LENGTH(chosen_categories) > 0
        ORDER BY last_activity_date DESC
        LIMIT 30
        """.format(placeholders)
        # IN-list values bind before the quality threshold.
        params = tuple(chosen) + (session['quality'],)

    cur.execute(query, params)
    result = cur.fetchall()
    for row in result:
        row['creation_date'] = row['creation_date'].strftime(
            '%Y-%m-%d %H:%M:%S')
        row['last_activity_date'] = row['last_activity_date'].strftime(
            '%Y-%m-%d %H:%M:%S')
    return Response(json.dumps(result, indent=4),
                    mimetype='application/json')
def do_allotment():
    """Allocate students to elective courses by CGPI priority.

    Strategy: clear the current allotment, then walk every (student,
    preference) row ordered by CGPI descending and preference ascending,
    giving each not-yet-allotted student the best preference whose class
    still has a free seat. Finally renders the resulting allotment table.
    """
    # Start with highest cgpi
    # allot the lowest preference possible
    get_db()  # Why g.db is not available? Due to flask_login?
    MAX_CLASS_SIZE = int(os.getenv('MAX_CLASS_SIZE', 1))  # Maximum no. of students in a class
    print("MAX_CLASS_SIZE=", MAX_CLASS_SIZE)
    with g.db as conn:
        cur = conn.cursor()
        cur.execute('''DELETE FROM alloted''')
        cur.execute(
            '''SELECT roll_number,scode,preference,cgpi FROM preferences NATURAL JOIN users ORDER BY cgpi DESC, preference ASC'''
        )
        result = cur.fetchall()
        for roll_number, scode, _, _ in result:
            # Skip students who already received a course.
            cur.execute(
                '''SELECT roll_number FROM alloted WHERE roll_number=(%s)''',
                (roll_number, ))
            done = cur.fetchone()
            if not done:
                # Is there still a seat in this course?
                cur.execute(
                    '''SELECT count(*) from alloted where scode=(%s)''',
                    (scode, ))
                class_size = cur.fetchone()
                class_size = class_size[0]
                if class_size < MAX_CLASS_SIZE:
                    cur.execute('''INSERT INTO alloted VALUES (%s,%s)''',
                                (roll_number, scode))
        cur.execute(
            '''SELECT roll_number,scode,sname FROM alloted NATURAL JOIN course'''
        )
        table = cur.fetchall()
    return render_template('allotment.html',
                           table=table)
def setUp(self):
    """Point the app at the vagrant-only database and build a test client."""
    # Manually set database config params to vagrant only DB
    app.app.config["DATABASE"] = 'vagrant'
    # Blank out Postgres connection params so only the local DB is used.
    for key in ("PG_HOST", "PG_USER", "PG_PASS"):
        app.app.config[key] = ""
    app.app.config["TESTING"] = True
    self.app = app.app.test_client()
    with app.app.app_context():
        app.get_db()
        self.resetDB()
def send_event(old_company, company, page):
    """Record a company-change event on the stream collection.

    Computes the diff between the old and new company documents and, when
    non-empty, inserts an undelivered stream entry stamped with a UTC+1
    weekday/time string.
    """
    zone, company_id = company.get('zone'), company.get('name')
    dt = (datetime.datetime.utcnow() +
          datetime.timedelta(hours=1)).strftime('%A %H:%M:%S')
    try:
        diff = get_diff(old_company, company)
    except Exception as e:
        # Store a string: a raw exception object is not BSON-serializable,
        # so the original insert would itself fail on this path.
        diff = {'error': str(e)}
    if diff:
        # insert_one replaces the deprecated Collection.insert().
        get_db().stream.insert_one({'delivered': False,
                                    'validated': False,
                                    'section': page,
                                    'zone': zone,
                                    'created_on': dt,
                                    'company': company_id,
                                    'diff': diff})
def _update_ambassador(value, day):
    """Reassign the current user's FRA ambassador company for one day.

    Clears any existing assignment for `day` on both the company and user
    documents, then — unless value is the sentinel 'none' — records the new
    company/user pairing.
    """
    previous = get_db().users.find_one(
        {'id': current_user.id},
        {'events.fra.ambassador': 1})['events']['fra'].get('ambassador')

    # Drop the old assignment for that day, on both sides.
    if previous and previous.get(day):
        get_db().companies.update_one(
            {'id': previous.get(day)},
            {'$unset': {'ambassadors.{}'.format(day): 1}})
        get_db().users.update_one(
            {'id': current_user.id},
            {'$unset': {'events.fra.ambassador.{}'.format(day): 1}})

    # 'none' means "clear only"; anything else is the new company id.
    if value != 'none':
        get_db().companies.update_one(
            {'id': value},
            {'$set': {'ambassadors.{}'.format(day): current_user.id}})
        get_db().users.update_one(
            {'id': current_user.id},
            {'$set': {'events.fra.ambassador.{}'.format(day): value}})
def complete_companies():
    """Merge rows of Entreprises2018.csv into the company documents.

    Each semicolon-delimited row is written under 'info', then the
    redundant 'info.id_entreprise' field is removed (the id already keys
    the document). The file handle is now closed deterministically — the
    original leaked it.
    """
    path = os.path.join(os.path.dirname(__file__), 'data/Entreprises2018.csv')
    with open(path, 'rt', encoding='utf8') as fh:
        for row in csv.DictReader(fh, delimiter=';'):
            get_db().companies.update_one({'id': row['id_entreprise']},
                                          {'$set': {'info': row}})
            get_db().companies.update_one(
                {'id': row['id_entreprise']},
                {'$unset': {'info.id_entreprise': 1}})
def complete_companies():
    """Merge rows of new_companies.csv into the company documents.

    Fixes: csv.DictReader requires a text-mode handle on Python 3 (the
    original opened the file with 'rb', which raises at read time), and the
    handle is now closed via a context manager.
    """
    path = os.path.join(os.path.dirname(__file__), 'data/new_companies.csv')
    with open(path, 'rt') as fh:
        for row in csv.DictReader(fh):
            get_db().companies.update_one({'id': row['id_entreprise']},
                                          {'$set': {'info': row}})
            get_db().companies.update_one(
                {'id': row['id_entreprise']},
                {'$unset': {'info.id_entreprise': 1}})
def app():
    """Yield a TESTING app whose database is seeded from /tests/data.sql."""
    app = create_app({'TESTING': True})

    # Seed the database with the fixture entries.
    with app.app_context():
        seeded = get_db()
        seeded.session.execute(_data_sql)
        seeded.session.commit()

    yield app

    # Tear everything down once the test is finished.
    with app.app_context():
        get_db().drop_all()
def client():
    """Yield a test client backed by a throwaway temp-file database."""
    db_fd, app.app.config['DATABASE_PATH'] = tempfile.mkstemp()
    app.app.config['TESTING'] = True
    client = app.app.test_client()
    with app.app.app_context():
        # The request context supplies the dummy 'http://localhost/' URL
        # that table initialization expects.
        with app.app.test_request_context():
            app.initalize_all_tables(
                app.get_db(app.app.config['DATABASE_PATH']))
            app.get_db()
    yield client
    # Remove the temporary database file.
    os.close(db_fd)
    os.unlink(app.app.config['DATABASE_PATH'])
def sell(doc, candle, orderbook=None, criteria=None):
    """Close off existing position and calculate earnings.

    Fix: the update document contained two '$push' keys; in a Python dict
    literal the second silently overwrote the first, so the criteria
    snapshot was never recorded. Both pushes now share one '$push' clause.

    Returns the _id of the closed trade document.
    """
    global client
    # Use the supplied orderbook, or fetch the live one for this pair.
    ob = orderbook if orderbook else client.get_orderbook_ticker(
        symbol=candle['pair'])
    bid = np.float64(ob['bidPrice'])

    pct_fee = BINANCE['PCT_FEE']
    buy_vol = np.float64(doc['orders'][0]['volume'])
    buy_quote = np.float64(doc['orders'][0]['quote'])
    p1 = np.float64(doc['orders'][0]['price'])

    pct_gain = pct_diff(p1, candle['close'])
    quote = buy_quote * (1 - pct_fee / 100)
    fee = (bid * buy_vol) * (pct_fee / 100)
    # Net gain subtracts the fee on both the buy and the sell legs.
    pct_net_gain = net_earn = pct_gain - (pct_fee * 2)  # quote - buy_quote

    duration = now() - doc['start_time']
    candle['buy_ratio'] = candle['buy_ratio'].round(4)

    get_db().trades.update_one({'_id': doc['_id']}, {
        '$push': {
            'snapshots': criteria['snapshot'],
            'orders': odict({
                'action': 'SELL',
                'ex': 'Binance',
                'time': now(),
                'price': candle['close'],
                'volume': 1.0,
                'quote': buy_quote,
                'fee': fee,
                'orderbook': ob,
                'candle': candle,
            })
        },
        '$set': {
            'status': 'closed',
            'end_time': now(),
            'duration': int(duration.total_seconds()),
            'pct_gain': pct_gain.round(4),
            'pct_net_gain': pct_net_gain.round(4),
        }
    })
    return doc['_id']
def show_home(stdscr):
    """Draw the curses home screen: header stats, ASCII banner, and menu."""
    db = get_db()
    # NOTE(review): cmc_tick and cmc_mkt are each counted twice here —
    # looks like a copy-paste; confirm which collections were intended.
    n_indexed = db.cmc_tick.count() + db.cmc_tick.count() +\
        db.cmc_mkt.count() + db.cmc_mkt.count()
    stdscr.clear()
    stdscr.addstr(0, 2, "%s datapoints indexed" % pretty(n_indexed, abbr=True))
    updated = "Updated 1 min ago"  # + to_relative_str(utc_datetime() - mktdata[0]["date"])
    # Right-align the freshness banner on the top row.
    stdscr.addstr(0, stdscr.getmaxyx()[1] - len(updated) - 2, updated)
    stdscr.addstr(3, 0, "")
    # ASCII-art title, rendered centered one row per line.
    title=\
    [" ██████╗ ██████╗ ██╗███╗   ██╗ ██████╗██████╗ ██╗   ██╗███╗   ██╗ ██████╗██╗  ██╗███████╗██████╗ "]+\
    ["██╔════╝██╔═══██╗██║████╗  ██║██╔════╝██╔══██╗██║   ██║████╗  ██║██╔════╝██║  ██║██╔════╝██╔══██╗"]+\
    ["██║     ██║   ██║██║██╔██╗ ██║██║     ██████╔╝██║   ██║██╔██╗ ██║██║     ███████║█████╗  ██████╔╝"]+\
    ["██║     ██║   ██║██║██║╚██╗██║██║     ██╔══██╗██║   ██║██║╚██╗██║██║     ██╔══██║██╔══╝  ██╔══██╗"]+\
    ["╚██████╗╚██████╔╝██║██║ ╚████║╚██████╗██║  ██║╚██████╔╝██║ ╚████║╚██████╗██║  ██║███████╗██║  ██║"]+\
    [" ╚═════╝ ╚═════╝ ╚═╝╚═╝  ╚═══╝ ╚═════╝╚═╝  ╚═╝ ╚═════╝ ╚═╝  ╚═══╝ ╚═════╝╚═╝  ╚═╝╚══════╝╚═╝  ╚═╝"]
    for line in title:
        stdscr.addstr(stdscr.getyx()[0] + 1,
                      int(stdscr.getmaxyx()[1] / 2 - len(line) / 2), line)
    # Print menu options
    width = stdscr.getmaxyx()[1]
    x = int(width / 2) - 10
    stdscr.addstr(stdscr.getyx()[0] + 3, x, "G    Global Market")
    stdscr.addstr(stdscr.getyx()[0] + 1, x, "S    Signals")
    stdscr.addstr(stdscr.getyx()[0] + 1, x, "H    Ticker History")
    stdscr.addstr(stdscr.getyx()[0] + 1, x, "D    Data Patterns")
    stdscr.addstr(stdscr.getyx()[0] + 1, x, "W    My Watchlist")
    stdscr.addstr(stdscr.getyx()[0] + 1, x, "P    My Portfolio")
    stdscr.addstr(stdscr.getyx()[0] + 1, x, "Q    Quit")
def init(evnt_pairs):
    """Bootstrap the trading bot: auth the Binance client, refresh the
    exchange pair metadata, and load the pair/candle state.

    Side effects: sets the module globals `client`, `dfc`, and `e_pairs`.
    """
    from app.common.timer import Timer
    from app.common.timeutils import strtofreq
    from . import candles, scanner
    global client, dfc, e_pairs

    e_pairs = evnt_pairs
    t1 = Timer()
    db = app.get_db()

    # Auth Binance client.
    # Assumes at least one api_keys document exists — TODO confirm.
    cred = list(db.api_keys.find())[0]
    client = Client(cred['key'], cred['secret'])

    # Get available exchange trade pairs
    info = client.get_exchange_info()
    # Upsert one assets document per exchange symbol.
    ops = [
        UpdateOne({'symbol': n['symbol']}, {'$set': n}, upsert=True)
        for n in info['symbols']
    ]
    db.assets.bulk_write(ops)
    #print("{} active pairs retrieved from api.".format(len(ops)))

    set_pairs([], 'DISABLED', query_temp=True)
    #print("{:,} historic candles loaded.".format(len(dfc)))
    print('{} trading algorithms.'.format(len(TRD_ALGOS)))
    print('app.bot initialized in {:,.0f} ms.'.format(t1.elapsed()))
def add_book():
    """Insert a book plus one bookItem row per copy, in one transaction.

    Returns JSON {'status': 'success'} or {'status': 'fail'}.
    Fix: the original except-handler referenced `cur` even when get_db() or
    cursor() was what failed, raising NameError inside the handler; the
    rollback is now guarded.
    """
    data = request.get_json()
    cur = None
    try:
        db = get_db()
        cur = db.cursor()
        cur.execute("BEGIN")
        cur.execute(
            "INSERT INTO book(ISBN, author, title, year, pages, numOfCopies) VALUES (?,?,?,?,?,?)",
            (data['ISBN'], data['author'], data['title'], data['year'],
             data['pages'], data['numOfCopies']))
        # One physical bookItem row per copy.
        for _ in range(int(data['numOfCopies'])):
            cur.execute("INSERT INTO bookItem(ISBN) VALUES (?)",
                        [data['ISBN']])
        cur.execute("COMMIT")
        return jsonify({'status': 'success'})
    except Exception as ex:
        print(ex)
        if cur is not None:
            cur.execute("ROLLBACK")
        return jsonify({'status': 'fail'})
def checkout_book():
    """Create an active loan for a book item unless it is already loaned.

    Expects JSON with b_id (book item), m_id (member), return_date and
    l_id (librarian). Returns JSON status; 'fail' with a message when the
    item already has an active loan.
    Fix: dropped the no-op commit the original issued after a SELECT.
    """
    data = request.get_json()
    try:
        db = get_db()
        cur = db.cursor()
        # Refuse when an active loan already exists for this item.
        cur.execute("SELECT * FROM loan where bi_id = ? AND status = 'active'",
                    [data['b_id']])
        if cur.fetchone():
            return jsonify({
                'status': 'fail',
                'message': 'book already loaned'
            })
        cur.execute(
            "INSERT INTO loan(bi_id, m_id, checkout_date, due_date, librarian_id, status) VALUES (?,?,?,?,?,?)",
            (data['b_id'], data['m_id'], date.today(), data['return_date'],
             data['l_id'], 'active'))
        db.commit()
        return jsonify({'status': 'success'})
    except Exception as ex:
        print(ex)
        return jsonify({'status': 'fail'})
def corr_minmax(symbol, start, end, maxrank):
    """Find lowest & highest price correlation coins (within max_rank)
    with given ticker symbol.
    """
    db = get_db()
    df = price_matrix(topcoins(maxrank), start, end, '5T')
    if len(df) < 1:
        return {"min": None, "max": None}

    col = df.corr()[symbol]
    del col[symbol]  # a coin correlates perfectly with itself; drop it
    lo, hi = col.idxmin(), col.idxmax()
    return {
        "symbol": symbol,
        "start": start,
        "end": end,
        "corr": col,
        "min": {lo: col[lo]},
        "max": {hi: col[hi]},
    }
def search_book():
    """Title substring search; returns matching books as JSON."""
    data = request.get_json()
    cur = get_db().cursor()
    print(data['title'])
    cur.execute("SELECT * FROM book where title LIKE ? ",
                ['%' + data['title'] + '%'])
    # Project each row down to the public fields.
    books_output = [{
        'ISBN': row['ISBN'],
        'title': row['title'],
        'author': row['author'],
        'year': row['year'],
    } for row in cur.fetchall()]
    return jsonify({'status': 'success', 'books': books_output})
def add_expense():
    """Create an expense from a JSON body containing 'name'."""
    # Reject non-JSON bodies and bodies without a name.
    if not request.is_json or 'name' not in request.json:
        abort(400)
    database.add_expense(get_db(), request.json['name'])
    return '', 200
def login():
    """Render the login form; on POST, authenticate and open a session."""
    if request.method == 'POST':
        from app import get_db
        db = get_db()
        username = request.form['username']
        password = request.form['password']

        # Parameterized lookup by username.
        row = db.session.execute(
            "SELECT * FROM users WHERE username = :username", {
                'username': username
            }).fetchone()

        if row is None:
            error = 'Incorrect username.'
        elif not check_password_hash(row['password'], password):
            error = 'Incorrect password.'
        else:
            error = None

        if error is None:
            # Fresh session for the authenticated user.
            session.clear()
            session['user_id'] = row['id']
            return redirect(url_for('index'))
        flash(error)
    return render_template('auth/login.html')
def gallery():
    """Render the logged-in user's image gallery.

    Fix: DB-API query parameters must be passed as a sequence — the
    original passed bare values (`(x)` is not a tuple) plus a redundant
    escape_string; the queries now bind proper 1-tuples.
    """
    error = ""
    try:
        # access database
        cnx = get_db()
        cursor = cnx.cursor()
        # file path of images
        APP_RELATED = 'images/' + session['username']
        # fetch names of the images owned by user
        cursor.execute("SELECT userID FROM users WHERE username = (%s)",
                       (session['username'], ))
        uID = cursor.fetchone()[0]
        cursor.execute("SELECT pName FROM images WHERE users_userID = (%s)",
                       (int(uID), ))
        imagenames = cursor.fetchall()
        # store image paths and pass to frontend
        images = [APP_RELATED + '/' + name[0] for name in imagenames]
        # cleanup
        cursor.close()
        cnx.close()
        return render_template("thumbnail-gallery.html",
                               title="Gallery",
                               images=images)
    except Exception as e:
        teardown_db(e)
        return str(e)
def survey():
    """Serve the survey; on POST, score the answers and store the result.

    Fix: the INSERT previously concatenated values into the SQL string
    (SQL-injection prone and quote-fragile); it now binds ? placeholders.
    Score values are stringified to preserve the column types the original
    wrote.
    """
    if 'logged_in' not in session or not session['logged_in']:
        return redirect(url_for('login'))
    qns = query_db('select * from questions_list')
    if request.method == 'POST':
        # Answers arrive as form fields named "1".."N".
        results = {}
        for i in range(1, len(qns) + 1):
            results[i] = int(request.form[str(i)])
        scores = processData(results)
        code = 'abc'
        with app.app_context():
            db = get_db()
            db.execute(
                'INSERT INTO score_list (user_id, code, growth, confidence, '
                'strategic, productive, team) VALUES (?, ?, ?, ?, ?, ?, ?)',
                (session['id'], code, str(scores['growth']),
                 str(scores['confidence']), str(scores['strategic']),
                 str(scores['productive']), str(scores['team'])))
            db.commit()
        return redirect(url_for('index'))
    return render_template('surveypage.html', questions=qns)
def show_entries():
    """Render all entries, newest first.

    fetchall() returns every row of the query result as a list.
    """
    rows = get_db().execute(
        'select title, text from entries order by id desc').fetchall()
    return render_template('show_entries.html', entries=rows)
def update_permissions(qid, admin_ids, manager_ids, blocked_user_ids):
    """Replace all permissions on `qid` with the given id lists.

    Admins win over managers: a pid present in both lists is stored only
    with the ADMIN role.
    Fix: when admin_ids is None but manager_ids is not, the original
    evaluated `pid not in None` and raised TypeError; the admin list now
    falls back to an empty tuple.
    NOTE(review): blocked_user_ids is accepted but currently unused.
    """
    db = get_db()
    db.execute(DELETE_PERMISSIONS, (qid,))
    admins = admin_ids or ()
    for pid in admins:
        db.execute(ADD_PERMISSION, (pid, qid, ADMIN))
    if manager_ids is not None:
        for pid in manager_ids:
            if pid not in admins:
                db.execute(ADD_PERMISSION, (pid, qid, MANAGER))
    db.commit()
def add_entry():
    """Create a new entry from the posted title/text.

    Fix: the flash message demands both a title and text, but the original
    condition used `or`, accepting a post with either field empty; the
    check now requires both to be non-empty.
    """
    if not session.get('logged_in'):
        # if user types /add at end of URL
        abort(401)  # unauthorized http
    if request.form['title'] != '' and request.form['text'] != '':
        db = get_db()
        # (?, ?) - used to avoid SQL injection
        db.execute('insert into entries (title, text) values (?, ?)',
                   [request.form['title'], request.form['text']])
        db.commit()
        flash('New entry successfully posted!')
    else:
        flash('Must enter a title and text')
    return redirect(url_for('show_entries'))
def run(self):
    # Poll loop: fetch repeatedly, sleeping INTERVAL seconds between
    # successful passes, until self.running is cleared.
    # NOTE(review): this is Python 2 syntax (print statements) — the rest
    # of the codebase uses Python 3; confirm which interpreter runs this.
    while self.running:
        print 'start'
        self.db = get_db()
        try:
            self.fetch()
        except KeyboardInterrupt:
            # Ctrl-C stops the loop cleanly.
            self.running = False
            print 'stop'
        except Exception as e:
            # Best-effort: report the error and keep polling.
            print e
        else:
            print 'sleep'
            time.sleep(INTERVAL)
    print 'end'
    self.db.close()
def add_permission_list(pids, qid, permission):
    """Grant `permission` on queue `qid` to every pid, then commit.

    Uses a single executemany round-trip instead of one execute per pid.
    """
    db = get_db()
    db.executemany(ADD_PERMISSION, [(pid, qid, permission) for pid in pids])
    db.commit()
def add_permission(pid, qid, permission):
    """Grant a single permission row on queue `qid` and commit."""
    connection = get_db()
    connection.execute(ADD_PERMISSION, (pid, qid, permission))
    connection.commit()
def api_categories():
    """Return every category name, ordered by id, as a JSON array."""
    cur = get_db().cursor()
    cur.execute("SELECT * FROM categories ORDER BY id ASC;")
    names = [row['name'] for row in cur]
    return Response(json.dumps(names, indent=4),
                    mimetype='application/json')
def user_visit():
    """Fetch an arbitrary book document and return it as JSON (debug aid)."""
    doc = get_db().books.find_one({})
    print(doc)
    return dumps(doc)
def questions():
    """Render the questions page with every category, alphabetized."""
    cur = get_db().cursor()
    cur.execute("SELECT * FROM categories ORDER BY name ASC;")
    return render_template('questions.html', categories=cur.fetchall())
#!/usr/bin/env python # -*- coding: utf-8 -*- from passlib.apps import custom_app_context as pwd_context import app db = app.get_db() class IdAndToDictMixin(object): id = db.Column(db.Integer, primary_key=True, autoincrement=True) def to_dict(self): _d = {} for _column in self.__table__.columns: if getattr(self, _column.name): _d[_column.name] = str(getattr(self, _column.name)) if '_id' in _column.name: _c = _column.name[0: -3] if hasattr(self, _c): _o = getattr(self, _c) if hasattr(_o, 'name'): _d[_c] = _o.name return _d class TimestampMixin(object): created_time = db.Column(db.DateTime, default=db.func.now()) updated_time = db.Column( db.DateTime, default=db.func.now(), onupdate=db.func.now())