def test_save_inserts_new_data(self): """ save should create rows for new data, and new data only""" # run SUT cursor = get_cursor() service_id = save( cursor, 'service', # table name ['service_name'], # unique columns ['service_name'], # all columns ('mock_service_name',), # values to insert ) cursor.close() # save returns an integer id for the service table self.assertTrue(isinstance(service_id, int)) # save creates one row per call with the saved data cursor = get_cursor() cursor.execute( "SELECT service_id, service_name\n" + \ "FROM service\n" + \ "WHERE service_name='mock_service_name'" ) result = cursor.fetchall() cursor.close() self.assertEqual(len(result), 1) self.assertEqual(result[0][1], 'mock_service_name')
def season(season=None, week=None):
    """Flask view: with no args list seasons; with a season list its weeks;
    with both render the games for that season/week.

    NOTE(review): "SELECT UNIQUE" and the :0/:1 bind style are
    Oracle-specific — confirm the backing database is Oracle.
    """
    if season is None:
        seasons = sorted([
            x[0]
            for x in get_cursor().execute('SELECT UNIQUE SEASON FROM GAME')
        ])
        return render_template('season_list.html', seasons=seasons)
    if week is None:
        weeks = sorted([
            x[0]
            for x in get_cursor().execute(
                'SELECT UNIQUE WEEK FROM GAME WHERE SEASON=:0', (season, ))
        ])
        return render_template('week_list.html', season=season, weeks=weeks)
    # join team names onto the game rows for the template
    games = get_cursor().execute(
        '''
        SELECT A.TEAM_NAME,B.TEAM_NAME,G.*
        FROM jpeel.GAME G,jpeel.TEAM A, jpeel.TEAM B
        WHERE A.TEAM_ABBR = HOME_TEAM
        AND B.TEAM_ABBR = AWAY_TEAM
        AND SEASON=:0 AND WEEK=:1
        ''', (season, week)).fetchall()
    return render_template('season.html', season=season, week=week, games=games)
def set_user_age(telegram_id, age: int):
    """Persist a new age value for the given Telegram user."""
    query = 'UPDATE user SET age = ? WHERE telegram_id = ?'
    db.get_cursor().execute(query, (age, telegram_id))
    db.get_connection().commit()
def set_user_height(telegram_id, height: int):
    """Persist a new height value for the given Telegram user."""
    query = 'UPDATE user SET height = ? WHERE telegram_id = ?'
    db.get_cursor().execute(query, (height, telegram_id))
    db.get_connection().commit()
def set_lasttime(telegram_id: int, dt_last: str, force_commit: bool = True):
    """Record the user's last-activity timestamp; commit unless deferred."""
    query = 'UPDATE user SET datetime_last = ? WHERE telegram_id = ?'
    db.get_cursor().execute(query, (dt_last, telegram_id))
    # callers batching several writes pass force_commit=False
    if force_commit:
        db.get_connection().commit()
def set_user_sex(telegram_id, sex: bool):
    """Store the user's sex flag; booleans are persisted as 0/1."""
    query = 'UPDATE user SET sex = ? WHERE telegram_id = ?'
    # teach sqlite3 to store Python bools as integers
    sqlite3.register_adapter(bool, int)
    db.get_cursor().execute(query, (sex, telegram_id))
    db.get_connection().commit()
def set_user_weight(telegram_id, weight: float):
    """Persist a new weight value for the given Telegram user.

    Note: the sqlite3.register_adapter(bool, int) call that used to live
    here was a copy-paste leftover from set_user_sex — weight is a float,
    so no bool adapter is needed.
    """
    sql = 'UPDATE user SET weight = ? WHERE telegram_id = ?'
    db.get_cursor().execute(sql, (weight, telegram_id))
    db.get_connection().commit()
def get_today_statistics(user_id: int) -> str:
    """
    Функция получения статистики за день
    :param user_id: идентификатор пользователя в VK
    :return: сообщения о расходах за день
    """
    cursor = db.get_cursor()
    # Получения всех расходов за день (параметризованный запрос —
    # никогда не подставлять user_id прямо в SQL)
    cursor.execute(
        "SELECT SUM(amount) "
        "FROM expenses "
        "WHERE DATE(created)=DATE('now', 'localtime') AND user_id = ?",
        (user_id,))
    result = cursor.fetchone()
    if not result[0]:
        return "Сегодня ещё нет расходов"
    all_today_expenses = result[0]
    # Получение базовых расходов за день
    cursor.execute(
        "SELECT SUM(amount) "
        "FROM expenses "
        "WHERE DATE(created)=DATE('now', 'localtime') "
        "AND category_codename IN (select codename FROM categories WHERE is_base_expense=true) "
        "AND user_id = ?",
        (user_id,))
    result = cursor.fetchone()
    base_today_expenses = result[0] if result[0] else 0
    answer_message = ("Расходы сегодня:\n"
                      f"всего — {all_today_expenses} руб.\n"
                      f"базовые — {base_today_expenses} руб. из {get_daily_limit(user_id=user_id)} руб.\n\n"
                      "За текущий месяц: /месяц")
    return answer_message
def info(self):
    """Return this submission's info blob, loading it lazily on first use."""
    if self._info:
        return self._info
    cur = get_cursor()
    cur.execute('select info from submission where rowid = ?', (self.sid, ))
    self._info = cur.fetchone()[0]
    return self._info
def new_deployment_pipeline( branch_id, copy_config_id=None, copy_env_id=None, automatic=False, ): """ Make a new deployment pipeline from a branch Will create new config and environments as copies of the given config and environments, or use a unit config and environment. """ # make config config_id = new_config(copy_config_id) # make env env_id = new_env(copy_env_id) # insert record cursor = get_cursor() cursor.execute( "INSERT INTO deployment_pipeline " + \ "(branch_id, config_id, environment_id, automatic) " + \ "VALUES (%s, %s, %s, %s) " + \ "RETURNING deployment_pipeline_id", (branch_id, config_id, env_id, automatic), ) deployment_pipeline_id = cursor.fetchone()[0] cursor.close() return deployment_pipeline_id
async def stats(self, ctx):
    """Reply with an embed of bot statistics: DB stats row, memory,
    database size and uptime."""
    if not util.check_ratelimiting(ctx):
        return
    process = psutil.Process(os.getpid())
    uptime = time.time() - start_time
    cur = db.get_cursor()
    db.execute(cur, 'SELECT * FROM stats')
    row = cur.fetchone()
    result = dict(row)
    result['memory_usage'] = "{:.3f} MB".format(process.memory_info().rss / 1024 / 1024)
    result['database_size'] = "{:.3f} MB".format(os.path.getsize(config.DB_PATH) / 1024 / 1024)
    result['uptime'] = to_hhmmss(uptime)
    embed = discord.Embed(title='Bot Statistics', colour=0x3498DB)
    embed.set_thumbnail(url=self.bot.user.avatar_url)
    # iterate key/value pairs directly instead of re-indexing per key
    for col, val in result.items():
        # thousands separators for integer stats, plain str for the rest
        valstr = f'{val:,}' if isinstance(val, int) else str(val)
        embed.add_field(name=str(col), value=valstr, inline=True)
    await ctx.send(embed=embed)
def floors():
    """Return the distinct floors of the requested building as JSON."""
    building = request.args.get('building')
    cur = get_cursor()
    cur.execute('select distinct floor from rooms where building = ?',
                (building,))
    floor_values = to_array(cur.fetchall())
    return jsonify({'building': building, 'floors': floor_values})
def get_day_stats(budget_id: int) -> str:
    """
    Gets statistics for the day

    Parameters:
    budget_id: int — the budget's id

    Returns:
    stats: str — the statistics of the budget for the day, in str format
    """
    cursor = db.get_cursor()
    # Parameterized query — interpolating budget_id with an f-string
    # was an SQL-injection hazard.
    cursor.execute(
        "SELECT SUM(amount) "
        "FROM expenses "
        "WHERE DATE(created) == DATE('now', 'localtime') "
        "AND budget_id = ?",
        (budget_id,))
    result = cursor.fetchone()
    # escape underscores so Markdown rendering does not italicize
    budget_name_mrkdwn = budgets.get_budget_name(budget_id).replace("_", "\\_")
    stats = f"Бюджет: \"{budget_name_mrkdwn}\"\n\n"
    if result[0] is None:
        stats += "Сегодня ещё не было расходов."
    else:
        today_exp_sum = result[0]
        stats += f"Всего потрачено: {today_exp_sum} руб."
    return stats
def main():
    """Read, Filter, & Update the Old Listings, exporting data for US CoHo."""
    cursor = db.get_cursor()
    communities = db.get_communities(cursor)
    cutoff_date = datetime.datetime(2016, 12, 5)
    # keep published communities whose last update is more than ~2 years
    # before the cutoff; enrich each with its meta fields
    communities = [
        db.add_community_metas(cursor, community)
        for community in communities
        if cutoff_date - community.get('updated_at', cutoff_date) > datetime.timedelta(
            days=365.5 * 2) and community.get('post_status', '') == 'publish'
    ]
    # subset: US communities tagged as Cohousing
    us_coho_communities = [
        community for community in communities
        if community.get('country', None) == 'United States'
        and 'Cohousing' in community.get('community_types', '')
    ]
    csv_exports.export_dictionaries(
        'unpublished_coho_communities.csv',
        [item_to_export(c) for c in us_coho_communities])
    csv_exports.export_dictionaries('unpublished_communities.csv',
                                    [item_to_export(c) for c in communities])
    # side effect: unpublish every stale community found above
    _ = [
        db.unpublish_community(cursor, community) for community in communities
    ]
def login_user(request):
    """Authenticate a user from a JSON login form.

    Returns 200 with the user payload (and sets a session cookie) on
    success, 202 with an error marker on unknown credentials, 500 on any
    other failure. Connection/cursor are always closed in finally.
    """
    returnValue = None
    statusCode = None
    setCookie = False
    # open connection
    connection = db.get_connection()
    cursor = db.get_cursor(connection)
    try:
        # process request
        jsonUser = request.get_json(force=True)
        user = db.try_login_user(cursor, jsonUser["username-input-login"],
                                 jsonUser["password-input-login"])
        if user:
            tokens.set_user_session(user)
            setCookie = True
            returnValue = jsonify(user)
            statusCode = 200
        else:
            # 202 (not 401) is this API's marker for bad credentials
            returnValue = jsonify({'error': 'UNKNOWN_CREDENTIALS'})
            statusCode = 202
    except Exception as e:
        # top-level boundary: report the failure to the client
        print('Caught this error: ' + repr(e))
        returnValue = jsonify({'error': str(e)})
        statusCode = 500
    finally:
        # close connection
        db.close_cursor(cursor)
        db.close_connection(connection)
    response = make_response(returnValue, statusCode)
    if setCookie:
        tokens.set_user_cookie(response)
    return response
def get_month_statistics(userr_id) -> List[Expense2]:
    """Return per-category expense totals for the user's current month.

    :param userr_id: id of the user whose expenses are summed
    :return: list of Expense2(amount, category_name), one per category
    """
    cursor = db.get_cursor()
    # Parameterized query — the user id was previously f-string
    # interpolated into the SQL (injection hazard).
    cursor.execute(
        "select c.name,sum(e.amount) as amount "
        "from expense e left join category c on e.category_name=c.name "
        "where strftime('%m',e.created)=strftime('%m',datetime('now', 'localtime')) "
        "and e.user_id=? group by c.name",
        (userr_id,))
    rows = cursor.fetchall()
    return [Expense2(amount=row[1], category_name=row[0]) for row in rows]
def get_today_statistics(userr_id) -> List[Expense2]:
    """Return today's expense totals for the user, grouped by alias.

    :param userr_id: id of the user whose expenses are summed
    :return: list of Expense2(amount, category_name), one per alias
    """
    cursor = db.get_cursor()
    # Parameterized query — the user id was previously f-string
    # interpolated into the SQL (injection hazard).
    cursor.execute(
        "select e.alias as name,sum(e.amount) as amount "
        "from expense as e "
        "where date(e.created)=date('now', 'localtime') "
        "and e.user_id=? group by e.alias",
        (userr_id,))
    rows = cursor.fetchall()
    return [Expense2(amount=row[1], category_name=row[0]) for row in rows]
def run_params(release_id):
    """ return the paramaters needed for a run on gce

    Joins release -> iteration -> branch -> deployment_pipeline -> config /
    environment -> feature -> service, filtered to the given release and the
    "gce" infrastructure backend. Returns the raw fetchall() row list.
    """
    cursor = get_cursor()
    cursor.execute(
        ("SELECT service_name\n"
         "     ,branch_name\n"
         "     ,c.config_id\n"
         "     ,key_value_pairs\n"
         "     ,commit_hash\n"
         "     ,image_name\n"
         "  FROM release r\n"
         "  JOIN iteration i\n"
         "    ON i.iteration_id = r.iteration_id\n"
         "  JOIN branch b\n"
         "    ON b.branch_id = i.branch_id\n"
         "  JOIN deployment_pipeline d\n"
         "    ON b.branch_id = d.branch_id\n"
         "   AND d.deployment_pipeline_id = r.deployment_pipeline_id\n"
         "  JOIN config c\n"
         "    ON c.config_id = d.config_id\n"
         "  JOIN environment e\n"
         "    ON e.environment_id = d.environment_id\n"
         "  JOIN feature f\n"
         "    ON f.feature_id = b.feature_id\n"
         "  JOIN service s\n"
         "    ON s.service_id = f.service_id\n"
         " WHERE release_id = %s\n"
         "   AND infrastructure_backend = %s"),
        (release_id, "gce"),
    )
    result = cursor.fetchall()
    cursor.close()
    return result
async def quote(self, ctx, id=-1):
    """Reply with a quote embed: the quote with the given id, or a
    random one when no id is supplied (id stays -1)."""
    if not util.check_ratelimiting(ctx):
        return
    with ctx.typing():
        cur = db.get_cursor()
        # specific quote id
        if id != -1:
            db.execute(cur, 'SELECT * FROM quotes WHERE id=? LIMIT 1', (id,))
        # random quote
        else:
            db.execute(cur, 'SELECT * FROM quotes ORDER BY RANDOM() LIMIT 1')
        row = cur.fetchone()
        if row is None:
            await ctx.reply('There is no quote with ID ' + str(id))
        else:
            user = self.bot.get_user(int(row["submitter"]))
            username = row["submitter_name"]
            # PEP 8 idiom: "is not None" rather than "not ... is None"
            if user is not None:
                username = user.name
            speaker = self.get_speaker(row["speaker"])
            # stored dates are epoch milliseconds
            dt = datetime.datetime.utcfromtimestamp(row["date"] / 1000)
            embed = discord.Embed(title='Quote #' + str(row["id"]), colour=0xFFFFFF,
                                  description=row["text"], timestamp=dt)
            embed.set_author(name=speaker["name"], icon_url=speaker["picture_url"],
                             url="http://bot.montclairpublicaccess.info/quotes.php")
            embed.set_footer(text="Submitted by " + username)
            await ctx.reply(embed=embed)
def get_images():
    """
    Get all images from /images endpoint and add it to db.

    Returns {"status": "success"} or {"status": "error", "message": ...}
    when the token or any page/detail fetch fails.
    """
    conn = get_conn()
    cursor = get_cursor(conn)
    token = get_token()
    if token.get("status") == "error":
        return {"status": "error", "message": token.get("message")}
    # fetch the first page to learn the total page count
    page = 1
    first_page_response = get_page_of_images(page, token["data"])
    if first_page_response.get("status") == "error":
        return {
            "status": "error",
            "message": first_page_response.get("message")
        }
    results = []
    results.extend(first_page_response["pictures"])
    total_pages = first_page_response["pageCount"]
    # remaining pages: page+1 .. total_pages
    while page < total_pages:
        response = get_page_of_images(page + 1, token["data"])
        if response.get("status") == "error":
            return {"status": "error", "message": response.get("message")}
        results.extend(response["pictures"])
        page += 1
    # fetch per-image detail and persist; log every 10th for progress
    for i, image in enumerate(results, start=1):
        if i % 10 == 0:
            logger.info(f"Getting details for image {i}/{len(results)}")
        item = get_image_info(image["id"], token["data"])
        add_to_db(item, conn, cursor)
    conn.close()
    logger.info("Received all images")
    return {"status": "success"}
def retrieve_all(cls):
    """Yield one rendered chart per row of the chart table.

    Each row's data_provider JSON is either a 2-element list (pair of
    single-value providers) or a dict (time-series provider).
    """
    conn, c = get_cursor()
    _sql = """
    SELECT id, chart_type, chart_params, data_provider FROM chart
    """
    for row in c.execute(_sql):
        chart_params = json.loads(row[-2])
        rule = json.loads(row[-1])
        if isinstance(rule, list) and len(rule) == 2:
            # Bind `rule` via a default argument: a plain closure would
            # late-bind and every chart would see the LAST row's rule.
            data_provider = lambda rule=rule: (
                _param_provider_map['single'][rule[0]['param']],
                _param_provider_map['single'][rule[1]['param']])
        elif isinstance(rule, dict):
            data_provider = _param_provider_map['by_time'][rule['param']]
        else:
            raise Exception('Unsupported structure')
        ch = cls.renderer_class(xy_data=data_provider,
                                machine_id=chart_params['machine_id'],
                                title=chart_params['title'],
                                label=chart_params['label'],
                                line_color=chart_params['line_color'],
                                point_color=chart_params['point_color'],
                                fill=chart_params['fill'],
                                width=chart_params['width'],
                                height=chart_params['height'])
        yield ch.render()
def get_month_statistics() -> str:
    """Returns a string of expense statistics for the current month"""
    now = _get_now_datetime()
    first_day_of_month = f'{now.year:04d}-{now.month:02d}-01'
    cursor = db.get_cursor()
    # Parameterize the date rather than interpolating it into the SQL.
    cursor.execute(
        "select sum(amount) "
        "from expense where date(created) >= ?",
        (first_day_of_month,))
    result = cursor.fetchone()
    if not result[0]:
        return 'There are no expenses yet this month'
    all_today_expenses = result[0]
    # base expenses only (categories flagged is_base_expense)
    cursor.execute(
        "select sum(amount) "
        "from expense where date(created) >= ? "
        "and category_codename in (select codename "
        "from category where is_base_expense=true)",
        (first_day_of_month,))
    result = cursor.fetchone()
    base_today_expenses = result[0] if result[0] else 0
    return (f"Expenses in the current month:\n"
            f"Total — {all_today_expenses} rub\n"
            f"Base — {base_today_expenses} rub out of "
            f"{now.day * _get_budget_limit()} rub")
def save_feed_entries(entries):
    """
    Stores the given list of entries in the database

    Arguments:
    * entries - a list of entry tuples matching the insert column order:
      (item_id, entry_published, entry_title, entry_author, entry_link,
      feed_id)

    Duplicate entries (unique-constraint violations) are silently
    skipped; any other integrity error is logged and re-raised.
    """
    cursor = db.get_cursor(db.get_connection())
    insert_stmt = """INSERT INTO entries(
                       item_id,
                       entry_published,
                       entry_title,
                       entry_author,
                       entry_link,
                       feed_id
                     ) VALUES (
                       %s, %s, %s, %s, %s, %s
                     );"""
    for entry in entries:
        try:
            cursor.execute(insert_stmt, entry)
            # commit per-row so one failure does not discard prior inserts
            cursor.connection.commit()
        except IntegrityError as ie:
            err = errorcodes.lookup(ie.pgcode)
            if(err != 'UNIQUE_VIOLATION'):
                logger.info("Integrity error: %s", ie)
                raise
            # duplicate row: roll back the failed insert and continue
            cursor.connection.rollback()
    cursor.connection.commit()  # Probably not neccesary
    cursor.close()
    return True
def load(self): c = get_cursor() # title & description c.execute('select title, desc from problem where rowid = ?', (self.pid, )) self.title, self.desc = c.fetchone() # tags c.execute('select tag from tag where pid = ?', (self.pid, )) self.tags = [v[0] for v in c] # testcode c.execute('select lang, code from testcode where pid = ?', (self.pid, )) self.testcodes = dict(list(c)) # snippet c.execute('select lang, code from snippet where pid = ?', (self.pid, )) self.snippets = dict(list(c)) # last submission code self.last_submission_codes = {} for lang in self.snippets: code = self.last_submission_code(lang) self.last_submission_codes[lang] = code # submissions c.execute('select count(*) from submission where pid=?', (self.pid, )) self.n_submissions = c.fetchone()[0] c.execute('select count(*) from submission where pid=? and state=?', (self.pid, 'accepted')) self.n_acceptions = c.fetchone()[0] # loaded self.dirty = False
def adminpanel_db_delete():
    """Drop every table under the database lock, then return to the panel."""
    with db.dataBaseLock:
        cursor = db.get_cursor()
        connection = db.get_connection()
        drop_all_tables(cursor)
        connection.commit()
        return redirect(url_for('adminpanel.adminpanel_func'))
def get_problem_by_pid(pid):
    """Fetch a problem by rowid; yields an empty Problem when not found."""
    cur = get_cursor()
    cur.execute('select rowid, title from problem where rowid = ?', (pid, ))
    row = cur.fetchone()
    if row:
        return Problem(*row, load=True)
    return Problem(None)
def get_month_statistics(user_id: int) -> str:
    """
    Функция получения статистики за месяц
    :param user_id: идентификатор пользователя в VK
    :return: сообщения о расходах за месяц
    """
    cursor = db.get_cursor()
    now = _get_now_datetime()
    first_day_of_month = f'{now.year:04d}-{now.month:02d}-01'
    # Получения всех расходов за месяц (параметризованный запрос —
    # никогда не подставлять значения прямо в SQL)
    cursor.execute(
        "SELECT SUM(amount) "
        "FROM expenses "
        "WHERE DATE(created) >= ? "
        "AND user_id = ?",
        (first_day_of_month, user_id))
    result = cursor.fetchone()
    if not result[0]:
        return "В этом месяце ещё нет расходов"
    all_today_expenses = result[0]
    # Получение базовых расходов за месяц
    cursor.execute(
        "SELECT SUM(amount) "
        "FROM expenses WHERE DATE(created) >= ? "
        "AND category_codename IN (SELECT codename FROM categories WHERE is_base_expense=true) "
        "AND user_id = ?",
        (first_day_of_month, user_id))
    result = cursor.fetchone()
    base_today_expenses = result[0] if result[0] else 0
    answer_message = (f"Расходы в текущем месяце:\n"
                      f"всего — {all_today_expenses} руб.\n"
                      f"базовые — {base_today_expenses} руб. из "
                      f"{now.day * get_daily_limit(user_id=user_id)} руб.")
    return answer_message
def main():
    """Export contact details of recently-updated US/Canada communities
    to us_ca_updated_listings.csv."""
    cursor = get_cursor()
    communities = get_communities(cursor)
    today = datetime.datetime.now()
    # keep US/Canada communities updated within the last ~2 years
    communities = [
        community for community in communities
        if today - community.get('updated_at', today) < datetime.timedelta(days=365.5*2)
        and community.get('country', '') in ['United States', 'Canada']
    ]
    # flatten each community into the CSV column layout
    data = [
        {'community': get_community_name(c),
         'contact name': c.get('contact_names_public', ''),
         'contact email': c.get('contact_email_public', ''),
         'phone number': c.get('contact_phone_public', ''),
         'country': c.get('country', ''),
         'state': c.get('state', ''),
         'province': c.get('state/province', ''),
         'city': c.get('city/town/village', ''),
         'zip': c.get('postal_code', ''),
         'street': c.get('street_address_line_1', ''),
         'street2': c.get('street_address_line_2', '')}
        for c in communities
    ]
    export_dictionaries('us_ca_updated_listings.csv', data)
def do_import():
    """Full catalogue import: wipe existing data, then pull every label's
    releases (albums, songs, artists) from MusicBrainz into the DB."""
    musicbrainzngs.set_useragent("SBUIfy", "0.0.1", "*****@*****.**")
    logging.getLogger('musicbrainzngs').setLevel(logging.WARNING)
    db.init()
    # remove existing data
    print('deleting existing data...')
    # delete_images()  todo: only delete artist & album images; move to image module
    db.execute_script('truncate.sql')
    print('done.', flush=True)
    # import albums, songs, and artists for all labels
    db.execute("SELECT * FROM label")
    for label in db.get_cursor().fetchall():
        print('importing label {}...'.format(label['name']))
        import_label_releases(label['id'], label['mbid'])
        print('done.', flush=True)
    # wrap up
    print('finishing...')
    db.execute_script('set_hibernate_sequence.sql')
    print('done.')
    db.close()
def leaderboardFunction():
    """Render the leaderboard page with all users plus the session user's
    friends, outgoing friend requests, and incoming friend requests."""
    with db.dataBaseLock:
        cursor = db.get_cursor()
        connection = db.get_connection()
        # every user shown on the leaderboard
        cursor.execute("""select username, email from users""")
        lst = cursor.fetchall()
        usr = session['username']
        # confirmed friends of the current user
        cursor.execute("""select friend from friends where (username = %s) """,
                       (session['username'],))
        tmp = cursor.fetchall()
        friends = []
        requested = []
        for it in tmp:
            friends.append(it[0])
        # requests this user has already sent
        cursor.execute("""select friend from friendrequests where (sender = %s) """,
                       (session['username'],))
        friendRequests = cursor.fetchall()
        for it in friendRequests:
            requested.append(it[0])
        # requests other users sent to this user
        comingRequests = []
        cursor.execute("""select sender from friendrequests where (friend = %s) """,
                       (session['username'],))
        tmp2 = cursor.fetchall()
        for it in tmp2:
            comingRequests.append(it[0])
        # remembered so other views can redirect back to this page
        session['url'] = url_for('leaderboard.leaderboardFunction')
        return render_template('leaderboard.html', lst=lst, len = len(lst),
                               usr = usr, friends=friends, requested=requested,
                               comingRequests=comingRequests)
def change_phone_by_name(user_input):
    '''
    SQL query for menu #6
    @param user_input dict User input needed for the query
    @return list of tuples The records yielded by the query
    '''
    first = user_input['first_name']
    last = user_input['last_name']
    with db.get_cursor() as cursor:
        # apply the phone-number update
        update_sql = """UPDATE applicants
                        SET phone_number = %s
                        WHERE first_name = %s AND last_name = %s;"""
        cursor.execute(update_sql, (user_input['phone_number'], first, last))
        # read back the updated record(s)
        select_sql = """SELECT first_name || ' ' || last_name as full_name,
                               phone_number
                        FROM applicants
                        WHERE first_name = %s AND last_name = %s;"""
        cursor.execute(select_sql, (first, last))
        return cursor.fetchall()
def userApi(username):
    """Return the user's information as a JSON response."""
    cursor = db.get_cursor()
    connection = db.get_connection()
    user_data = user_inform(cursor, username)
    connection.commit()
    print("res", user_data)
    return jsonify(user_data)
def deleteMessage():
    """Delete the message named in the POSTed form and report success."""
    message_id = request.form.get('message_id')
    connection = db.get_connection()
    cursor = db.get_cursor()
    delete_message(cursor, message_id)
    connection.commit()
    return jsonify({'result': 'success'})
def add():
    """Send a friend request from the session user to the POSTed username.

    Validates login, self-request, username length, existence, and
    existing friendship before inserting the request.
    """
    if not session.get('logged_in'):
        return redirect(url_for('loginB.login_func'))
    with db.dataBaseLock:
        cursor = db.get_cursor()
        connection = db.get_connection()
        username = request.form.get('username')
        if username == session['username']:
            return render_template('logged.html', error='it is you')
        if len(username) < 5:
            return render_template('logged.html',
                                   error='username is too short (min 5 char)')
        cursor.execute(
            """select userpassword from users where (username = %s) """,
            (username, ))
        passTuple = cursor.fetchone()
        if passTuple == None:
            return render_template('logged.html', error='user not found')
        cursor.execute(
            """select username,friend from friends where(username=%s and friend=%s)""",
            (session['username'], username))
        is_friend = cursor.fetchone()
        if is_friend is not None:
            return render_template('logged.html', error='Already friend')
        try:
            friend_request(cursor, session['username'], username)
        except Exception:
            # narrowed from a bare except: so SystemExit/KeyboardInterrupt
            # are no longer swallowed; commit still runs in finally
            return render_template('logged.html', error='error')
        finally:
            connection.commit()
        return redirect(url_for('leaderboard.leaderboardFunction'))
def idem_release_in_automatic_pipelines(iteration_id):
    """
    Idempotently release an iteration in all it's automatic pipelines

    The idempotency is gained by a uniqueness constraint in the database
    on the (iteration_id, deployment_pipeline_id) pair. only the first
    insert can happen without a conflict (which we will need to catch and
    do nothing). when we are on postgres 9.5 we can do this with
    "ON CONFLICT DO NOTHING"

    Returns the list of new release_id rows ([] when the release already
    existed).
    """
    cursor = get_cursor()
    release_ids = []
    try:
        cursor.execute(
            "INSERT INTO release (iteration_id, deployment_pipeline_id)\n" + \
            "     SELECT iteration_id, deployment_pipeline_id\n" + \
            "       FROM iteration\n" + \
            "       JOIN branch USING (branch_id)\n" + \
            "       JOIN deployment_pipeline USING (branch_id)\n" + \
            "      WHERE iteration_id = %s\n" + \
            "  RETURNING release_id",
            (iteration_id,),
        )
        # fetch inside the try: after a conflict there is nothing to fetch,
        # and the old code's unconditional fetchall() would raise here
        release_ids = cursor.fetchall()
    except Exception:
        # uniqueness conflict: the release already exists — nothing to do
        pass
    cursor.close()
    return release_ids
def get_previous_month_statistics() -> str:
    """Returns a string of statistics on expenses for the previous month"""
    now = _get_now_datetime()
    # last day of previous month = day before the 1st of this month
    prev_last_day = now.replace(day=1) - datetime.timedelta(days=1)
    prev_first_day = prev_last_day.replace(day=1)
    first_day_of_previous_month = f'{prev_first_day.year:04d}-{prev_first_day.month:02d}-01'
    last_day_of_previous_month = f'{prev_last_day.year:04d}-{prev_last_day.month:02d}-{prev_last_day.day:02d}'
    cursor = db.get_cursor()
    # parameterized dates instead of f-string interpolation into SQL
    cursor.execute(
        "select sum(amount) "
        "from expense where date(created) "
        "between ? and ?",
        (first_day_of_previous_month, last_day_of_previous_month))
    result = cursor.fetchone()
    if not result[0]:
        return "There were no expenses in previous month"
    all_prev_month_expenses = result[0]
    # base expenses only (categories flagged is_base_expense)
    cursor.execute(
        "select sum(amount) "
        "from expense where date(created) >= ? "
        "and date(created) <= ? "
        "and category_codename in (select codename "
        "from category where is_base_expense=true)",
        (first_day_of_previous_month, last_day_of_previous_month))
    result = cursor.fetchone()
    base_prev_month_expenses = result[0] if result[0] else 0
    return (
        f"Previous month expenses:\n"
        f"total - {all_prev_month_expenses} hrn.\n"
        f"base expenses - {base_prev_month_expenses} hrn. out of {prev_first_day.day * _get_budget_limit()} hrn."
    )
def get_modified_configpath_db(configpath):
    "gets recorded modified time as long in microsecs, no transaction"
    with db.get_cursor() as cur:
        try:
            cur.execute(_SELECT_MODIFIED_CONFIG, [configpath])
            return _int_from_fetchone(cur.fetchone())
        except Exception as err:
            db.log_error("get_modified_configpath_db -> None", err)
        # most likely no refresh of config file now
        return None
def get_configpath():
    "gets the configpath"
    with db.get_cursor() as cur:
        try:
            cur.execute(_SELECT_CONFIGPATH)
            return db.one_or_none(cur)
        except Exception as err:
            db.log_error("get_configpath -> None", err)
        return None
def rooms():
    """List the rooms on one floor of one building as JSON."""
    building = request.args.get('building')
    floor = request.args.get('floor')
    cur = get_cursor()
    cur.execute('select room from rooms where building = ? and floor = ?',
                (building, floor))
    room_values = to_array(cur.fetchall())
    return jsonify({
        'building': building,
        'floor': floor,
        'rooms': room_values,
    })
def rooms_of_lesson():
    """Return the rooms where the requested lesson happens on the given date.

    Query params: lesson (name), date (YYYY-MM-DD); the weekday derived
    from the date selects the timetable rows.
    """
    cur = get_cursor()
    lesson = request.args.get('lesson').encode('utf-8')
    date = request.args.get('date')
    weekday = datetime.strptime(date, '%Y-%m-%d').isoweekday()
    cur.execute('select building, room, teacher, beg, end from lessons where lesson = %s and weekday = %s',
                (lesson, weekday))
    r = cur.fetchall()
    # `print r` was Python-2-only syntax; the call form works everywhere
    print(r)
    return jsonify({'rooms': r, 'lesson': lesson})
def save_to_db(all_sets_json, keyword_id):
    """Upsert every quizlet set for a keyword via the
    create_or_update_quizlet stored procedure, then commit."""
    cursor = get_cursor()
    for entry in to_object(all_sets_json):
        quizlet_id = entry["id"]
        payload = to_json(entry)
        cursor.callproc(
            "create_or_update_quizlet", (quizlet_id, keyword_id, payload))
    cursor.connection.commit()
    cursor.close()
def floors():
    """List the distinct floors of a building; 'building' is required."""
    cur = get_cursor()
    building_arg = request.args.get('building')
    if building_arg is None:
        return error('Require <building> to be specified.')
    building = building_arg.encode('utf-8')
    cur.execute('select distinct floor from rooms where building = %s',
                (building,))
    return jsonify({
        'building': building,
        'floors': to_array(cur.fetchall())})
def delete_user_deck(deck_id):
    """Delete the given user deck; returns a small status string."""
    try:
        cursor = get_cursor()
        # Bug fix: execute params must be a sequence — (deck_id) is just
        # the bare value, (deck_id,) is the 1-tuple the driver expects.
        cursor.execute(
            "DELETE FROM UserDecks WHERE deck_id=%s", (deck_id,))
        cursor.connection.commit()
        cursor.close()
        return "{success:'OK'}"
    except Exception:
        return '{"error":"deck not found"}'
def apply_config(configpath):
    "read and apply a configuration"
    configuration = read_config(configpath)
    ctimef = get_modified_configpath_fs(configpath)  # of file
    with db.get_cursor() as cur:
        # take an exclusive lock for the whole batch of updates
        cur.execute("BEGIN EXCLUSIVE")  # we are the only ones
        for dic in configuration:
            cur.execute(_APPLY_CONFIG_UPDATE, [dic["z"], dic["which"]])
        # record the file's mtime so later runs can skip unchanged configs
        cur.execute(_UPDATE_MODIFIED_CONFIG, [str(ctimef), configpath])
        cur.connection.commit()
    return True  # we initialized
def rooms_of_lesson():
    """Return where a lesson takes place on a given date, as JSON dicts."""
    cur = get_cursor()
    lesson = request.args.get('lesson')
    date = request.args.get('date')
    weekday = datetime.strptime(date, '%Y-%m-%d').isoweekday()
    cur.execute('select building, room, teacher, beg, end from lessons where lesson = ? and weekday = ?',
                (lesson, weekday))
    columns = ('building', 'room', 'teacher', 'beg', 'end')
    room_dicts = [dict(zip(columns, record)) for record in cur.fetchall()]
    return jsonify({'rooms': room_dicts, 'lesson': lesson})
def save_user_deck():
    """Insert a new user deck from request args and return the saved row.

    Returns an error JSON string when anything fails.
    """
    try:
        json = request.args["json"]
        google_id = request.args["google_id"]
        cursor = get_cursor()
        cursor.execute(
            "INSERT INTO UserDecks (google_id,json) VALUES (%s, %s)",
            (google_id, json))
        user_deck_id = cursor.lastrowid
        cursor.connection.commit()
        cursor.close()
        return to_json(query_one("SELECT * FROM UserDecks WHERE deck_id = %s",
                                 user_deck_id))
    except Exception:
        # narrowed from a bare except: which also swallowed SystemExit etc.
        return '{"error":"not saved"}'
def update_user_deck():
    """Replace a deck's json from request args and return the updated row.

    Returns an error JSON string when anything fails.
    """
    try:
        deck_json = request.args["json"]
        deck_id = request.args["deck_id"]
        cursor = get_cursor()
        cursor.execute(
            """UPDATE UserDecks SET json=%s WHERE deck_id=%s""",
            (deck_json, deck_id))
        cursor.connection.commit()
        cursor.close()
        # Bug fix: cursor.lastrowid is meaningless after an UPDATE —
        # re-query by the deck_id we already have.
        return to_json(query_one("SELECT * FROM UserDecks WHERE deck_id = %s",
                                 deck_id))
    except Exception:
        # narrowed from a bare except: which also swallowed SystemExit etc.
        return '{"error":"not saved"}'
def rooms():
    """List a building floor's rooms; 'building' and 'floor' are required."""
    cur = get_cursor()
    try:
        building = request.args.get('building').encode('utf-8')
        floor = request.args.get('floor').encode('utf-8')
    except AttributeError:
        # a missing arg comes back as None, which has no .encode
        return error('Require <building> and <floor> to be specified.')
    cur.execute('select room from rooms where building = %s and floor = %s',
                (building, floor))
    room_rows = cur.fetchall()
    return jsonify({
        'building': building,
        'floor': floor,
        'rooms': to_array(room_rows),
    })
def main():
    """CLI entry point: create a feed, list feeds, fetch one feed, or
    refresh all blogs, depending on sys.argv."""
    #conn = sqlite3.connect(db_file)
    conn = db.get_connection()
    blog_c = db.get_cursor(conn)
    if len(sys.argv) > 2 and sys.argv[1] == "create" :
        # Do some fetching of the URL - such that 1. we check it exists and
        # works, 2 get title and link
        if len(sys.argv) < 3:
            print("A url is needed when creating")
            return -1
        feed_url = sys.argv[2]
        feed_title, feed_link = getFeedInfo(feed_url)
        blog_c.execute("INSERT INTO blogs(blog_url,blog_title) VALUES(?,?)",
                       (feed_url,feed_title) )
        conn.commit()
        blog_c.close()
    elif len(sys.argv) > 1 and sys.argv[1] == "logs" :
        # placeholder: log printing not implemented yet
        print("Printing logs")
    elif len(sys.argv) > 1 and sys.argv[1] == "list":
        print("Listing feeds")
        blog_c.execute("SELECT feed_id, feed_url, feed_ttl, feed_title FROM feeds WHERE feed_id > 0;")
        blogs = blog_c.fetchall()
        blog_c.close()
        print(blogs)
    elif len(sys.argv) > 2 and sys.argv[1] == "fetch":
        print("Fetching feed")
        blog_id = int(sys.argv[2])
        # NOTE(review): this branch uses %s placeholders while the create
        # branch uses ? — they imply different DB drivers; confirm which
        # paramstyle db.get_connection() actually provides.
        blog_c.execute("SELECT feed_id, feed_url FROM feeds WHERE feed_id = %s;",
                       (blog_id,))
        blog = blog_c.fetchall()[0]
        blog_c.close()
        print("Now fetching feed")
        entries = fetch_feed(blog[1], blog[0])
        print("Fetched feed, now storing")
        save_feed_entries(entries)
    else:
        # default: refresh every blog
        blog_c.execute("SELECT blog_id, blog_url FROM blogs WHERE blog_id > 0;")
        blogs = blog_c.fetchall()
        blog_c.close()
        for blog in blogs:
            start = int(time())
            entries = fetch_feed(blog[1], blog[0])
            save_feed_entries(entries)
def test_save_is_idempotent(self, text): """ when we save stuff more than once it should only get in once """ # run SUT cursor = get_cursor() service_id = save( cursor, 'service', # table name ['service_name'], # unique columns ['service_name'], # all columns (text,), # values to insert ) cursor.execute( "SELECT service_id, service_name\n" + \ "FROM service\n" + \ "WHERE service_name=%s", (text,), ) first_result = cursor.fetchall() cursor.close() cursor = get_cursor() service_id = save( cursor, 'service', # table name ['service_name'], # unique columns ['service_name'], # all columns (text,), # values to insert ) cursor.execute( "SELECT service_id, service_name\n" + \ "FROM service\n" + \ "WHERE service_name=%s", (text,), ) second_result = cursor.fetchall() # confirm that the second save did not change the result self.assertEqual(first_result, second_result)
def get_decks_from_database(user_id):
    """Fetch all of a user's decks, decoding each stored json blob when
    it parses; undecodable blobs are left as raw strings."""
    cursor = get_cursor()
    # (removed unused `output =` binding — cursor.execute's return value
    # was never used)
    cursor.execute("""
        SELECT json
        FROM UserDecks
        WHERE user_id = %s
        """, (user_id,))
    decks = make_dicts(cursor, cursor.fetchall())
    cursor.close()
    for deck in decks:
        try:
            deck["json"] = to_object(deck["json"])
        except JSONDecodeError:
            pass
    return decks
def lessons():
    """Return the lessons held in one room on one date as JSON."""
    cur = get_cursor()
    building = request.args.get('building')
    room = int(request.args.get('room'))
    date = datetime.strptime(request.args.get('date'), '%Y-%m-%d')
    weekday = date.isoweekday()
    cur.execute('select * from lessons where building = ? and room = ? and weekday = ?',
                (building, room, weekday))
    columns = ('lesson', 'teacher', 'building', 'floor', 'room', 'weekday',
               'beg', 'end')
    lesson_dicts = [dict(zip(columns, record)) for record in cur.fetchall()]
    # replace the weekday column with the concrete requested date
    for entry in lesson_dicts:
        entry['date'] = date.strftime('%Y-%m-%d')
        del entry['weekday']
    return jsonify({'lessons': lesson_dicts})
def get_decks_from_database(keyword):
    """Fetch every deck linked to a keyword, with its average-selected
    ratio, decoding the stored json when it parses."""
    cursor = get_cursor()
    cursor.execute("""
        SELECT keyword_id, keyword, json, terms_selected/GREATEST(times_deck_selected,1) as avg_selected
        FROM Keywords NATURAL JOIN KeywordsQuizletDecks NATURAL JOIN QuizletDecks
        WHERE keyword = %s
        """, (keyword,))
    rows = cursor.fetchall()
    decks = make_dicts(cursor, rows)
    cursor.close()
    for deck in decks:
        try:
            deck["json"] = to_object(deck["json"])
        except JSONDecodeError:
            # leave the raw string in place when it is not valid JSON
            pass
    return decks
def new_config(based_on_id=None):
    """
    Make a new config based on the given config or an empty one
    """
    # start empty; copy the source config's pairs when one was given
    key_value_pairs_text = ''
    if based_on_id:
        key_value_pairs_text = get_config(based_on_id)['key_value_pairs']
    cursor = get_cursor()
    cursor.execute(
        "INSERT INTO config (key_value_pairs) VALUES (%s) RETURNING config_id",
        (key_value_pairs_text,),
    )
    config_id = cursor.fetchone()[0]
    cursor.close()
    return config_id
def lessons():
    """Return the lessons held in one room on one date; all three query
    params (building, room, date) are required."""
    cur = get_cursor()
    try:
        building = request.args.get('building').encode('utf-8')
        room = request.args.get('room')
        date = request.args.get('date')
    except AttributeError:
        # a missing arg is None, which has no .encode
        return error('Require <building> and <room> and <date> to be specified.')
    room = int(room)
    date = datetime.strptime(date, '%Y-%m-%d')
    weekday = date.isoweekday()
    cur.execute('select * from lessons where building = %s and room = %s and weekday = %s',
                (building, room, weekday))
    lesson_rows = cur.fetchall()
    # replace the weekday column with the concrete requested date
    for entry in lesson_rows:
        entry['date'] = date.strftime('%Y-%m-%d')
        del entry['weekday']
    return jsonify({'lessons': lesson_rows})
def getter(__val__=default, **kwargs):
    """ return the values in the key passed to the factory

    a single keyword argument may be passed to specify an alternate key

    >>> getter(x)        # looks for key=x where key was passed to make_getter
    >>> getter(myKey=y)  # looks for myKey=y

    Returns a dict mapping column names to the first matching row's values,
    or {} when nothing matches.  Raises LookupError when no value is given.
    """
    # we need either a single kwarg telling us the key (override) and the value
    if len(kwargs) == 1:
        __key__ = list(kwargs.keys())[0]
        __val__ = kwargs[__key__]
    # or we need to be passed a value for this getter's default key
    else:
        if __val__ == default:
            raise LookupError("please provide a value to look for")
        __key__ = key
    # the key/values/table names come from trusted factory arguments, not
    # user input; only the looked-up value is parameterized
    sql_template = "SELECT {values} FROM {table_name} WHERE {key}=%s"
    sql = sql_template.format(
        table_name=table_name,
        values=values,
        key=__key__,
    )
    cursor = get_cursor()
    try:
        cursor.execute(sql, (__val__,))
        if cursor.description is None:
            return {}
        row = cursor.fetchone()
        # BUG FIX: fetchone() returns None when no row matches (description is
        # still set for a row-less SELECT); the old code crashed in zip()
        # instead of returning an empty dict.
        if row is None:
            return {}
        return dict(zip([c[0] for c in cursor.description], row))
    finally:
        # close on every path, including exceptions
        cursor.close()
def handle_build(service_name, branch_name, merge_base_commit_hash, commit_hash, image_name):
    """ Save the data going into this build, then deploy the build

    Service, branch, commit and image are idempotently saved to the
    database (save() matches on the unique columns before inserting).
    """
    cursor = get_cursor()
    try:
        service_id = save(
            cursor,
            'service',           # table name
            ['service_name'],    # unique columns
            ['service_name'],    # columns
            (service_name,),     # values
        )
        branch_id = save(
            cursor,
            'branch',  # table
            ['branch_name', 'merge_base_commit_hash', 'deleted_dt'],  # unique
            ['branch_name', 'merge_base_commit_hash', 'service_id'],  # columns
            (branch_name, merge_base_commit_hash, service_id),        # values
        )
        iteration_id = save(
            cursor,
            'iteration',                                # table name
            ['commit_hash', 'branch_id'],               # unique columns
            ['commit_hash', 'branch_id', 'image_name'], # columns
            (commit_hash, branch_id, image_name),       # values
        )
        config_id = correct_qa_config(cursor, branch_id, merge_base_commit_hash)
        # release id is not needed afterwards, so the return value is dropped
        save(
            cursor,
            'release',                      # table name
            [],                             # unique columns
            ['iteration_id', 'config_id'],  # columns
            (iteration_id, config_id),      # values
        )
    finally:
        # close even when a save() raises, so the cursor is not leaked
        cursor.close()
def set_iteration(iteration_id, updates):
    """ Make updates to an iteration

    *updates* maps column names to new values.  Column names are
    interpolated into the SQL text, so they must come from trusted code,
    never from user input; the values themselves are parameterized.
    """
    cursor = get_cursor()
    sql_set_strings = []
    sql_values = []
    # sort for a deterministic SET-clause order
    for k, v in sorted(dict(updates).items()):
        sql_set_strings.append("{}=%s".format(k))
        sql_values.append(v)
    set_string = ','.join(sql_set_strings)
    sql_values.append(iteration_id)
    try:
        cursor.execute(
            "UPDATE iteration " + \
            "SET {} ".format(set_string) + \
            "WHERE iteration_id=%s",
            tuple(sql_values),
        )
    finally:
        # close even if the UPDATE raises, so the cursor is not leaked
        cursor.close()
def get_or_create_keyword(keyword):
    """Return (keyword_id, last_updated, created) for *keyword*.

    Looks the keyword up first; inserts a new Keywords row (and commits)
    when it does not exist.  *created* is True only when a row was inserted.
    """
    created = False
    output = query_one("""
        SELECT keyword_id, last_updated
        FROM Keywords where keyword = %s
    """, (keyword,))
    if output:
        keyword_id, last_updated = output
    else:
        keyword_id = None
    if not keyword_id:
        created = True
        cursor = get_cursor()
        try:
            # NOTE(review): SELECT-then-INSERT is racy under concurrent
            # callers; a unique index on keyword plus an upsert would make
            # this atomic — confirm schema before relying on uniqueness.
            cursor.execute("""
                INSERT INTO Keywords (keyword) VALUES (%s)
            """, (keyword,))
            keyword_id = cursor.lastrowid
            last_updated = datetime.now()
            cursor.connection.commit()
        finally:
            # close even if the INSERT/commit raises, so the cursor is not leaked
            cursor.close()
    return keyword_id, last_updated, created
def update_config(configpath=None):
    """applies configpath if modification date is newer than last recorded

    Compares the file's mtime against the mtime recorded in the database;
    when the file is newer, re-reads the config file and applies its values
    inside an EXCLUSIVE transaction, double-checking the recorded mtime
    after taking the lock so concurrent updaters do not clobber each other.
    Returns True only when this call performed the update.
    """
    if not configpath:
        configpath = get_configpath()
    ctimeu = get_modified_configpath_db(configpath)  # last recorded
    # idiom fix: identity comparison for None (was `== None`)
    if ctimeu is None:  # db under load?, do not update now
        db.log_error("update_config: ctime is None, no update")
        return False
    ctimef = get_modified_configpath_fs(configpath)  # of file
    # print("last modified: %d\n last updated: %d\n" % (ctimef, ctimeu))
    if ctimef > ctimeu:  # need to update
        # diff = float(ctimef-ctimeu)/_MILLION
        # print("need to update, secs diff:", "%1.6f" % diff)
        configuration = read_config(configpath)  # read configfile
        with db.get_cursor() as cur:
            try:
                cur.execute("BEGIN EXCLUSIVE")  # we are the only ones
                # check that there has been no intermediate update
                # a little bit like double checked locking...
                cur.execute(_SELECT_MODIFIED_CONFIG, [configpath])
                ctimeu2 = _int_from_fetchone(cur.fetchone())
                # print(ctimeu, ctimeu2)
                if ctimeu != ctimeu2:
                    # unhealthy int equals (a float in there)
                    # no need to rollback, yet
                    # NOTE(review): returning here leaves the EXCLUSIVE
                    # transaction open — presumably the cursor context
                    # manager resolves it; confirm db.get_cursor semantics.
                    msg = "update_config: somebody else updated it %d "
                    db.log_warn(msg % ctimeu2)
                    return False  # somebody else has updated it
                # update values
                for dic in configuration:
                    cur.execute(_APPLY_CONFIG_UPDATE, [dic["z"], dic["which"]])
                # update modification date last, need to use very long number
                # that's why we hold TEXT not INTEGER or REAL
                cur.execute(_UPDATE_MODIFIED_CONFIG, [str(ctimef), configpath])
                cur.connection.commit()
                return True  # we initialized
            except Exception as data:  # may timeout (deliberate broad catch, logged)
                db.log_error("update_config -> no update now", data)
                return False
    return False  # no need to update