def customer_home():
    menu = db.get_menu(db.get_connection())
    combos, pizzas, sides, subs = menu[0], menu[1], menu[2], menu[3]
    if request.method == 'POST':
        if request.form["myforms"] == "order":
            db.send_order(db.get_connection(), request.form['this_order'])
            order = request.form['this_order']
            subtotal = request.form['this_sub']
            tax = request.form['this_tax']
            total = float(subtotal) + float(tax)
            session['order'] = order
            session['subtotal'] = subtotal
            session['tax'] = tax
            session['total'] = total
            return redirect(url_for('thank_cus'))
    return render_template('customer.html', customer=session['username'],
                           combos=combos, pizzas=pizzas, sides=sides, subs=subs)
def testConnection(self):
    db.reset_connection()
    try:
        # This should throw an exception because no connection has been made yet.
        db.get_connection()
    except Exception:
        pass
    else:
        # self.fail must sit in the else clause: inside the try, the bare
        # except would swallow the AssertionError it raises.
        self.fail("get_connection didn't throw an exception.")
    db.set_database_type("postgres")
    self.assertNotEqual(db.connect_default(), None)
    self.assertNotEqual(db.get_connection(), None)
    try:
        # This should throw an exception because the database type cannot be
        # changed while a connection is open.
        db.set_database_type("postgres")
    except Exception:
        pass
    else:
        self.fail("set_database_type didn't throw an exception.")
    db.reset_connection()
    try:
        # This should throw an exception because the connection has already
        # been reset.
        db.get_connection()
    except Exception:
        pass
    else:
        self.fail("get_connection didn't throw an exception.")
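# The test above pins down a small connection-manager contract on the db
# module. A minimal sketch of a module satisfying that contract, assuming
# postgres via psycopg2 with a hypothetical default DSN; this is illustrative,
# not the original implementation.
import psycopg2

_connection = None
_database_type = None

def reset_connection():
    global _connection
    if _connection is not None:
        _connection.close()
    _connection = None

def set_database_type(db_type):
    global _database_type
    if _connection is not None:
        raise RuntimeError("cannot change database type while a connection is open")
    _database_type = db_type

def connect_default():
    global _connection
    _connection = psycopg2.connect("")  # hypothetical default DSN
    return _connection

def get_connection():
    if _connection is None:
        raise RuntimeError("no connection has been made yet")
    return _connection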
def employee_home():
    orders = db.get_orders(db.get_connection())
    if request.method == 'POST':
        if request.form["myforms"] == "complete_order":
            db.finish_order(db.get_connection(), request.form['order_id'])
            return redirect(url_for('employee_home'))
        if request.form["myforms"] == "add_menu":
            item_type = request.form['type']
            if item_type == "combo":
                db.add_combo(db.get_connection(), request.form['item'], request.form['price'])
            if item_type == "pizza":
                db.add_pizza(db.get_connection(), request.form['item'], request.form['price'])
            if item_type == "side":
                db.add_side(db.get_connection(), request.form['item'], request.form['price'])
            if item_type == "subs":
                db.add_sub(db.get_connection(), request.form['item'], request.form['price'])
        if request.form["myforms"] == "rem_menu":
            item_type = request.form['type_id']
            if item_type == "combo":
                db.delete_combo(db.get_connection(), request.form['item_id'])
            if item_type == "pizza":
                db.delete_pizza(db.get_connection(), request.form['item_id'])
            if item_type == "side":
                db.delete_side(db.get_connection(), request.form['item_id'])
            if item_type == "subs":
                db.delete_sub(db.get_connection(), request.form['item_id'])
    return render_template('employee.html', employee=session['username'], orders=orders)
def sync_account_region(account_number, region, vpc_id='', new_connection=True):
    rsp = {}
    if new_connection:
        db.get_connection()
    file_name = '/tmp/account-%s-%s' % (account_number, region)
    if vpc_id:
        file_name = '%s-%s' % (file_name, vpc_id)
    logging.basicConfig(filemode='w', level=logging.INFO, filename=file_name,
                        format="%(asctime)s %(message)s")
    logging.info('Syncing Account %s %s %s', account_number, region, vpc_id)
    rsp['vpcs'] = aws_resources.vpc.sync(account_number, region, vpc_id)
    rsp['route-tables'] = aws_resources.route_table.sync(account_number, region, vpc_id)
    rsp['subnets'] = aws_resources.subnet.sync(account_number, region, vpc_id)
    rsp['security-groups'] = aws_resources.security_group.sync(account_number, region, vpc_id)
    rsp['instances'] = aws_resources.instance.sync(account_number, region, vpc_id)
    rsp['classic-load-balancers'] = aws_resources.classic_lb.sync(account_number, region, vpc_id)
    rsp['elastic-load-balancers'] = aws_resources.elastic_lb.sync(account_number, region, vpc_id)
    rsp['tgw-attachments'] = aws_resources.tgw_attachment.sync(account_number, region, vpc_id)
    logging.info(json.dumps(rsp, indent=4))
    if new_connection:
        db.close()
    return rsp
def sync_all_account_regions():
    # For each account and region, sync in parallel (within an account/region it is sequential).
    print('Starting sync all accounts/regions at %s' % datetime.datetime.now())
    accounts = []
    db.get_connection()
    for account in models.Account.objects:
        for region in account.regions:
            accounts.append([account.account_number, region])
    db.close()
    processes = {}
    for account in accounts:
        # account is [account_number, region]; pass it as a real tuple so the
        # two elements become positional args (args=(account) was not a tuple).
        pname = '%s-%s' % (account[0], account[1])
        p = Process(target=sync_account_region, args=tuple(account), name=pname)
        print('Started', pname)
        p.start()
        processes[p.pid] = p
    print('Started', len(processes), 'processes')
    wait_for_processes(processes)
    db.get_connection()
    for account in db.get_items(models.Account, page_size=0, json_output=False):
        account.last_updated = datetime.datetime.utcnow()
        account.save()
    db.close()
    print('Ending sync cycle at %s' % datetime.datetime.now())
    return
def sync_batch():
    processes = {}
    db.get_connection()
    for item in models.VpcSyncTask.objects(state='queued'):
        print('Processing', item.account_number, item.region, item.vpc_id)
        p = Process(target=sync, args=(item, ))
        processes[item.id] = p
        item.state = 'running'
        item.start_date = datetime.datetime.utcnow()
        item.save()
    db.close()
    for p in processes.values():
        p.start()
        print(p.pid)
    # Wait for the processes to finish.
    db.get_connection()
    while True:
        for key in list(processes):
            process = processes[key]
            if process.is_alive():
                continue
            process.join()
            item = models.VpcSyncTask.objects(id=key).first()
            item.state = 'completed'
            item.end_date = datetime.datetime.utcnow()
            item.save()
            del processes[key]
        if not processes:
            break
        else:
            time.sleep(5)
    db.close()
def get_or_create_user(bot, update):
    user = update.message.from_user
    cursor = db.get_connection().cursor()
    cursor.execute('SELECT * FROM users WHERE id = %s', (user.id, ))
    result = cursor.fetchone()
    if result is None:
        values = []
        for attribute in ['id', 'first_name', 'last_name', 'username']:
            values.append(getattr(user, attribute, None))
        # If the command was run in public, ask the user to PM us.
        # Compare with != here: `is not` tests identity, which is unreliable for ints.
        if update.message.chat_id != update.message.from_user.id:
            bot.send_message(
                chat_id=update.message.chat_id,
                text="Hey {}! Please message me at @zenafbot so that I can PM you!".format(
                    get_name(user)))
            values.append(False)
        else:
            values.append(True)
        cursor.execute(
            "INSERT INTO users(id, first_name, last_name, username, haspm) VALUES (%s, %s, %s, %s, %s)",
            values)
        cursor.execute('SELECT * FROM users WHERE id = %s', (user.id, ))
        result = cursor.fetchone()
    db.get_connection().commit()
    cursor.close()
    return result
def set_user_height(telegram_id, height: int):
    sql = '''UPDATE user SET height = ? WHERE telegram_id = ?'''
    db.get_cursor().execute(sql, (height, telegram_id))
    db.get_connection().commit()
def set_user_age(telegram_id, age: int):
    sql = '''UPDATE user SET age = ? WHERE telegram_id = ?'''
    db.get_cursor().execute(sql, (age, telegram_id))
    db.get_connection().commit()
def set_lasttime(telegram_id: int, dt_last: str, force_commit: bool = True):
    sql = '''UPDATE user SET datetime_last = ? WHERE telegram_id = ?'''
    db.get_cursor().execute(sql, (dt_last, telegram_id))
    if force_commit:
        db.get_connection().commit()
def set_user_weight(telegram_id, weight: float):
    sql = '''UPDATE user SET weight = ? WHERE telegram_id = ?'''
    db.get_cursor().execute(sql, (weight, telegram_id))
    db.get_connection().commit()
def set_user_sex(telegram_id, sex: bool):
    sql = '''UPDATE user SET sex = ? WHERE telegram_id = ?'''
    # Store Python bools as integers in SQLite.
    sqlite3.register_adapter(bool, int)
    db.get_cursor().execute(sql, (sex, telegram_id))
    db.get_connection().commit()
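# The setters above repeat one parameterized UPDATE apiece. A minimal
# consolidation sketch, assuming the same db.get_cursor()/db.get_connection()
# pair; set_user_field and the column whitelist are hypothetical, not part of
# the original module. The whitelist matters because column names cannot be
# bound as SQL parameters, only interpolated.
_USER_COLUMNS = {'height', 'age', 'weight', 'sex', 'datetime_last'}

def set_user_field(telegram_id, column, value, force_commit=True):
    if column not in _USER_COLUMNS:
        raise ValueError("unknown user column: {}".format(column))
    sql = "UPDATE user SET {} = ? WHERE telegram_id = ?".format(column)
    db.get_cursor().execute(sql, (value, telegram_id))
    if force_commit:
        db.get_connection().commit()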
def index() -> str:
    conn = db.get_connection()
    # user: {
    #     "user_id": "1333463",
    #     "user_name": "Eric Wu",
    #     "password": "******"
    # }
    db.create_user(conn, '1333463', 'Eric Wu', 'helloworld01')
    user_name = db.select_user_by_user_id(conn, '1333463')
    return json.dumps({'a user inserted': user_name})
def activate_changes(self, generate_page=True):
    if generate_page:
        # Page generation is currently a no-op.
        pass
    with db.get_connection():
        db.sync_all(db.version_current, db.version_new)
        db.cleanup_orphan_images()
def build():
    encountered_texts = set()
    trait_rows = []
    for row in get_from_datamaster('EquipTraits.csv'):
        text = row["Text"]
        if text not in encountered_texts:
            encountered_texts.add(text)
            trait_rows.append(row)
    with get_connection() as con:
        cur = con.cursor()
        cur.execute("DROP TABLE IF EXISTS Traits")
        cur.execute("CREATE TABLE Traits("
                    "Id INTEGER PRIMARY KEY AUTOINCREMENT, "
                    "TraitType INTEGER, "
                    "Text TEXT, "
                    "FOREIGN KEY(TraitType) REFERENCES TraitTypes(Id))")
        cur.execute("SELECT TraitTypeName, Id FROM TraitTypes")
        foreign_keys = {cur_row[0]: cur_row[1] for cur_row in cur.fetchall()}
        for trait_row in trait_rows:
            # Use placeholders rather than string formatting so values are
            # escaped properly and the foreign key is stored as an integer.
            cur.execute("INSERT INTO Traits (TraitType, Text) VALUES (?, ?)",
                        (foreign_keys[trait_row["TraitTypeName"]],
                         trait_row["Text"]))
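# A minimal alternative sketch for the insert loop above: sqlite3's
# executemany sends all rows through one prepared statement. Assumes the
# trait_rows list and foreign_keys dict built inside build(); otherwise
# hypothetical.
rows = [(foreign_keys[r["TraitTypeName"]], r["Text"]) for r in trait_rows]
cur.executemany("INSERT INTO Traits (TraitType, Text) VALUES (?, ?)", rows)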
def initialize(self, Conf=None):
    self.dbc = get_connection()
    myconf = Conf
    x_real_ip = self.request.headers.get("X-Real-IP")
    self.remote_ip = x_real_ip or self.request.remote_ip
    if not is_allowed(self.remote_ip, myconf):
        raise APIError(401)
def main(args):
    with spreadsheet(SAMPLE_SPREADSHEET_ID, SAMPLE_RANGE_NAME) as values:
        specimen_model = requests.get(
            'https://raw.githubusercontent.com/cr-ste-justine/clin-FHIR/master/specimen.json'
        ).json()
        specimen_model.pop('parent')
        row_parser = RowParser(values[0])
        connection = get_connection(args)
        with db_transaction(connection):
            for row in values[1:]:
                specimen_row = row_parser.as_dict(row)
                specimen = copy.deepcopy(specimen_model)
                specimen['id'] = specimen_row['id']
                specimen['status'] = specimen_row['status']
                specimen['subject']['reference'] = f"Patient/{specimen_row['subject']}"
                specimen['request'][0]['reference'] = f"ServiceRequest/{specimen_row['request']}"
                specimen['container'][0]['identifier'][0]['value'] = specimen_row['container.identifier.value']
                specimen_json = json.dumps(specimen, ensure_ascii=False)
                cursor = connection.cursor()
                insert_query = ("insert into specimen (id, txid, resource, status) "
                                "values (%s, 0, %s, 'created')")
                cursor.execute(insert_query, (specimen['id'], specimen_json))
def run():
    database = get_connection(DATABASE_HOST, DATABASE_PORT, DATABASE_USERNAME,
                              DATABASE_PASSWORD, DATABASE_NAME)
    insert_values = []
    response = request(API_URL_CHAMPIONS, 'global')
    champions_dict = response['data']
    version = response['version']
    for champion in champions_dict.values():
        insert_values.append(u"({}, {}, {}, {}, {})".format(
            champion['id'],
            database.escape(champion['name']),
            database.escape('http://ddragon.leagueoflegends.com/cdn/{}/img/champion/{}'.format(
                version, champion['image']['full'])),
            database.escape('http://ddragon.leagueoflegends.com/cdn/img/champion/loading/'
                            + champion['image']['full'].replace('.png', '_0.jpg')),
            database.escape('http://ddragon.leagueoflegends.com/cdn/img/champion/splash/'
                            + champion['image']['full'].replace('.png', '_0.jpg')),
        ))
    insert_query = u'''
        INSERT INTO champions (id, name, image_icon_url, image_loading_url, image_splash_url)
        VALUES {}
    '''.format(u','.join(insert_values))
    database.execute('TRUNCATE TABLE champions')
    database.execute(insert_query)
def db_get_match_ids(region, summoner, begin_index, end_index):
    database = get_connection(DATABASE_HOST, DATABASE_PORT, DATABASE_USERNAME,
                              DATABASE_PASSWORD, DATABASE_NAME)
    sql = u"""
        SELECT match_id
        FROM matches
        WHERE 1
            AND match_season = {match_season}
            AND match_region = {region}
            AND summoner_id = {summoner_id}
            {missing_item_temp_fix_sql}
        ORDER BY match_create_datetime DESC
        LIMIT {limit}
        OFFSET {offset}
    """.format(
        match_season=database.escape(SEASON_NAME),
        region=database.escape(region),
        summoner_id=summoner['id'],
        limit=(end_index - begin_index),
        offset=begin_index,
        missing_item_temp_fix_sql=MISSING_ITEM_TEMP_FIX_SQL,
    )
    logger.warning('[db_get_match_ids] SQL: {}'.format(sql))
    return database.fetch_all_value(sql)
def main(args):
    with spreadsheet(SAMPLE_SPREADSHEET_ID, SAMPLE_RANGE_NAME) as values:
        study_model = requests.get(
            'https://raw.githubusercontent.com/cr-ste-justine/clin-FHIR/master/researchStudy.json'
        ).json()
        row_parser = RowParser(values[0])
        connection = get_connection(args)
        with db_transaction(connection):
            for row in values[1:]:
                study_row = row_parser.as_dict(row)
                study = copy.deepcopy(study_model)
                study['id'] = study_row['id']
                study['status'] = study_row['status']
                study['title'] = study_row['title']
                study['description'] = study_row['description']
                study['sponsor']['reference'] = f"Organization/{study_row['sponsor']}"
                study['principalInvestigator']['reference'] = f"Practitioner/{study_row['principalInvestigator']}"
                study['enrollment'][0]['reference'] = f"Group/{study_row['enrolement']}"
                study['period']['start'] = study_row['period.start']
                study['period']['end'] = study_row['period.end']
                study_json = json.dumps(study, ensure_ascii=False)
                cursor = connection.cursor()
                insert_query = ("insert into researchstudy (id, txid, resource, status) "
                                "values (%s, 0, %s, 'created')")
                cursor.execute(insert_query, (study['id'], study_json))
def plot_different_clusterings():
    connection = db.get_connection()
    # Alias the aggregate so pandas exposes it as the "mp_score" column used below.
    sql = (
        "select max(m.mp_score) as mp_score, avg(m.processing_time) as processing_time, m.method"
        " from method_evaluation as m"
        " where m.sample_size < 2000 and m.mp_score is not null"
        " group by m.method")
    data = pandas.read_sql(sql=sql, con=connection)
    connection.close()
    fig = plt.figure()
    X = data["processing_time"].values
    Y = data["mp_score"].values
    colors = np.random.rand(len(X))
    plt.scatter(X, Y, c=colors, alpha=0.5)
    methods = data["method"].values
    for index, method in enumerate(methods):
        plt.annotate(method, (X[index], Y[index]), xytext=(X[index], Y[index] + 0.02))
    plt.ylim(top=1, bottom=0)
    plt.xlabel("Processing time in seconds")
    plt.ylabel("Accuracy")
    plt.title("Comparison of different clustering methods")
    plt.grid(True, "major", ls="--", lw=0.5, c="k", alpha=0.3)
    plt.savefig("../../doc/images/different_clusterings.png")
    plt.close(fig)
def le():
    con, curs = db.get_connection()
    curs.execute("SELECT * FROM year")
    y = curs.fetchall()
    curs.execute("SELECT * FROM country")
    c = curs.fetchall()
    with open("../data/LifeExpectancy.csv", newline='\n') as f:
        reader = csv.reader(f, delimiter=',')
        for row in reader:
            country, year = row[0], row[1]
            both, male, female = row[2], row[3], row[4]
            year_id = get_year_id(y, year)
            country_id = get_country_id(c, country)
            if year_id is None or country_id is None:
                print(country, year)
            else:
                curs.execute(
                    """
                    INSERT INTO life_expectancy(country_id, year_id, both, male, female)
                    VALUES(?, ?, ?, ?, ?)
                    """, (country_id, year_id, both, male, female))
    con.commit()
def md():
    con, cur = db.get_connection()
    cur.execute("SELECT * FROM year")
    y = cur.fetchall()
    cur.execute("SELECT * FROM country")
    c = cur.fetchall()
    years = [2017, 2016, 2015, 2014, 2013, 2012, 2011, 2010]
    with open("../data/MalariaDeaths.csv", newline='\n') as f:
        reader = csv.reader(f, delimiter=',')
        for row in reader:
            country = row[0]
            for year, cell in zip(years, row[1:]):
                # Strip the uncertainty interval, e.g. "123 [100-150]" -> "123".
                data = cell.split('[')[0].replace(' ', '')
                year_id = get_year_id(y, year)
                country_id = get_country_id(c, country)
                if country_id is not None:
                    cur.execute(
                        """
                        INSERT INTO malaria_deaths(country_id, year_id, deaths)
                        VALUES(?, ?, ?)
                        """, (country_id, year_id, data))
    con.commit()
def run():
    database = get_connection(DATABASE_HOST, DATABASE_PORT, DATABASE_USERNAME,
                              DATABASE_PASSWORD, DATABASE_NAME)
    insert_values = []
    for region in ['na', 'euw', 'kr']:
        for tier in ['MASTER', 'CHALLENGER']:
            print("Getting {} {}".format(region, tier))
            players = (fetch_challenger_players(region) if tier == 'CHALLENGER'
                       else fetch_master_players(region))
            for player in players:
                insert_values.append(u"({}, '{}', {}, '{}', '{}', UTC_TIMESTAMP())".format(
                    player['playerOrTeamId'],
                    region,
                    database.escape(player['playerOrTeamName']),
                    tier,
                    player['division']
                ))
            print("Adding {}".format(region))
    insert_query = u'''
        INSERT INTO summoners (id, region, name, rank_tier, rank_division, last_update_datetime)
        VALUES {}
        ON DUPLICATE KEY UPDATE
            rank_tier = VALUES(rank_tier),
            rank_division = VALUES(rank_division),
            last_update_datetime = VALUES(last_update_datetime)
    '''.format(u','.join(insert_values))
    database.execute(insert_query)
def main():
    conn = db.get_connection()
    cursor = conn.cursor()
    parse(cursor, conn)
    cursor.close()
    conn.close()
    print('done.')
def adminpanel_db_delete():
    with db.dataBaseLock:
        cursor = db.get_cursor()
        connection = db.get_connection()
        drop_all_tables(cursor)
        connection.commit()
    return redirect(url_for('adminpanel.adminpanel_func'))
def _get_random_user_words(self, max_count=10):
    user_id = self._extract_user_id()
    logging.info(f"get {max_count} random words with their translations from user {user_id}")
    user_words = list()
    is_enough = False
    with db.get_connection() as conn:
        words_t = db.get_table('words')
        query = words_t.select(words_t.c.user_id == user_id).order_by(func.random())
        for row in conn.execute(query):
            if len(user_words) >= max_count:
                is_enough = True
                break
            if 'raw_data' not in row or not row['raw_data']:
                logging.warning(f"got incorrect row from database {row}")
                continue
            translations = list()
            logging.warning(json.loads(row['raw_data']))
            for _, vals in json.loads(row['raw_data'])['translations'].items():
                for elem in vals:
                    translations.append(elem)
            user_words.append((row['word'], translations))
    if not is_enough:
        logging.warning(f"found not enough user words (need {max_count}, got {len(user_words)})")
    logging.debug(f"extract words: {user_words}")
    return user_words
def save_feed_entries(entries):
    """
    Stores the given list of entries in the database.

    Arguments:
    * entries - a list of feed entries; each entry carries its own feed_id
    """
    cursor = db.get_cursor(db.get_connection())
    insert_stmt = """INSERT INTO entries(
        item_id,
        entry_published,
        entry_title,
        entry_author,
        entry_link,
        feed_id
    ) VALUES (
        %s, %s, %s, %s, %s, %s
    );"""
    for entry in entries:
        try:
            cursor.execute(insert_stmt, entry)
            cursor.connection.commit()
        except IntegrityError as ie:
            err = errorcodes.lookup(ie.pgcode)
            # A unique violation just means the entry is already stored;
            # anything else is a real error.
            if err != 'UNIQUE_VIOLATION':
                logger.info("Integrity error: %s", ie)
                raise
            cursor.connection.rollback()
    cursor.connection.commit()  # Probably not necessary
    cursor.close()
    return True
def plot_event_detection_differences_by_hours():
    connection = db.get_connection()
    sql = (
        "select last_processed_date, result, new_rows, mp_score, hours from script_execution"
        " where hours is not null"
        " order by last_processed_date")
    data = pandas.read_sql(sql=sql, con=connection)
    connection.close()
    fig, (ax1, ax2) = plt.subplots(nrows=1, ncols=2, figsize=(15, 5))
    _plot_event_differences("hours", data, plt, ax1, ax2, [72, 48, 24])
    ax1.set_title("Difference in true vs detected events with batch_size n=hours")
    ax2.set_title("Difference in true vs detected changes with batch_size n=hours")
    plt.savefig("../../doc/images/event_detection_differences_hours.png")
    plt.close(fig)
def find_all(page, pageSize):
    conn = get_connection()
    if conn == "":  # no usable connection
        return None
    limit = (int(page) - 1) * pageSize
    cursor = conn.cursor()
    sql = "select * from bug_record where flag=0 limit %s,%s"
    try:
        cursor.execute(sql, [limit, pageSize])
        data = []
        for i in cursor:
            bug = bugInfo()
            bug.bug_id = i["bug_id"]
            bug.product = i["product"]
            bug.bugname = i["bugname"]
            bug.feedbacktime = i["feedbacktime"]
            bug.endtime = i["endtime"]
            bug.feedbackpeople = i["feedbackpeople"]
            bug.dealpeople = i["dealpeople"]
            bug.dealstate = i["dealstate"]
            bug.bugdetail = i["bugdetail"]
            bug.dealmethod = i["dealmethod"]
            bug.bugsystem = i["bugsystem"]
            bug.systemversion = i["systemversion"]
            bug.productsystem = i["productsystem"]
            bug.systemmodel = i["systemmodel"]
            data.append(bug)
    finally:
        conn.close()
    return data
def imr():
    con, curs = db.get_connection()
    curs.execute("SELECT * FROM year")
    y = curs.fetchall()
    curs.execute("SELECT * FROM country")
    c = curs.fetchall()
    with open("../data/InfantMortalityRate.csv", newline='\n') as f:
        reader = csv.reader(f, delimiter=',')
        for row in reader:
            country, year = row[0], row[1]
            both, male, female = row[2], row[3], row[4]
            both_b = row[5]
            both_c, male_c, female_c = row[6], row[7], row[8]
            year_id = get_year_id(y, year)
            country_id = get_country_id(c, country)
            if year_id is None or country_id is None:
                print(country, year, country_id, year_id)
            else:
                curs.execute(
                    """INSERT INTO infant_mortality_rate(country_id, year_id, both, male,
                        female, both_b, both_c, male_c, female_c)
                    VALUES(?, ?, ?, ?, ?, ?, ?, ?, ?)
                    """, (country_id, year_id, both, male, female,
                          both_b, both_c, male_c, female_c))
    con.commit()
def plot_hdbscan_parameters():
    connection = db.get_connection()
    sql = (
        "select m.mp_score as mp_score, m.parameters, m.real_clusters from method_evaluation as m"
        " where m.method = 'hdbscan' and m.mp_score is not null"
        " order by m.real_clusters")
    data = pandas.read_sql(sql=sql, con=connection)
    connection.close()
    X = data["real_clusters"].unique()
    fig = plt.figure(figsize=(15, 5))
    min_size_to_show = [2, 4, 6, 8]
    marker = {2: 'o', 4: 'D', 6: 's', 8: '^'}
    plt.subplot(1, 2, 1)
    Y, Y_lower_err, Y_higher_err = format_data(data, "cosine", "mp_score")
    for size in min_size_to_show:
        plt.errorbar(X, list(Y[size]),
                     yerr=[list(Y_lower_err[size]), list(Y_higher_err[size])],
                     fmt=marker[size], capsize=3, alpha=0.5,
                     label="min = {}".format(size))
    plt.xlabel("Number of stories")
    plt.ylabel("MP-Score")
    plt.ylim(top=1, bottom=0)
    plt.title("HDBSCAN Min cluster sizes using metric=cosine")
    plt.legend()
    plt.grid(True, "major", ls="--", lw=0.5, c="k", alpha=0.3)
    plt.subplot(1, 2, 2)
    Y, Y_lower_err, Y_higher_err = format_data(data, "euclidean", "mp_score")
    for size in min_size_to_show:
        plt.errorbar(X, list(Y[size]),
                     yerr=[list(Y_lower_err[size]), list(Y_higher_err[size])],
                     fmt=marker[size], capsize=3, alpha=0.5,
                     label="min = {}".format(size))
    plt.xlabel("Number of stories")
    plt.ylabel("MP-Score")
    plt.ylim(top=1, bottom=0)
    plt.title("HDBSCAN Min cluster sizes using metric=euclidean")
    plt.legend()
    plt.grid(True, "major", ls="--", lw=0.5, c="k", alpha=0.3)
    plt.savefig("../../doc/images/hdbscan_parameters.png")
    plt.close(fig)
def calculate_score_and_variance():
    connection = db.get_connection()
    sql = (
        "select last_processed_date, result, new_rows, is_full_cluster, nrows, mp_score from script_execution"
        " where failed = 0 and execution_date < '2019-05-28 14:00:00' and threshold < 0.2 and nrows is not null"
        " order by last_processed_date")
    data = pandas.read_sql(sql=sql, con=connection)
    print_statistics("nrows", data)
    sql = (
        "select last_processed_date, result, new_rows, is_full_cluster, fraction, mp_score from script_execution"
        " where threshold < 0.2 and fraction is not null"
        " order by last_processed_date")
    data = pandas.read_sql(sql=sql, con=connection)
    print_statistics("fraction", data)
    sql = (
        "select last_processed_date, result, new_rows, is_full_cluster, hours, mp_score from script_execution"
        " where threshold < 0.2 and hours is not null"
        " order by last_processed_date")
    data = pandas.read_sql(sql=sql, con=connection)
    print_statistics("hours", data)
    connection.close()
def find_match_ids(region, summoner, begin_index, end_index):
    database = get_connection(DATABASE_HOST, DATABASE_PORT, DATABASE_USERNAME,
                              DATABASE_PASSWORD, DATABASE_NAME)
    # If loading the first page and a refresh is possible, pull new data from the API.
    if begin_index == 0 and summoner['can_refresh']:
        logger.warning(u'[find_match_ids] getting api matches')
        api_pull_match_history(region, summoner, begin_index)
        # Update the last refresh datetime since we just refreshed.
        sql = u"""
            UPDATE summoners
            SET last_refresh_datetime = UTC_TIMESTAMP()
            WHERE 1
                AND region = {}
                AND id = {}
        """.format(
            database.escape(region),
            summoner['id'],
        )
        database.execute(sql)
    # Try getting match ids.
    match_ids = db_get_match_ids(region, summoner, begin_index, end_index)
    logger.warning('[find_match_ids] db_get_match_ids FIRST TRY got {}'.format(match_ids))
    # If not the first page and no matches were found, try pulling new ones from the API.
    if (begin_index > 0 and not match_ids) or len(match_ids) < MATCHES_PER_PAGE:
        logger.warning('[find_match_ids] getting api matches 2')
        api_pull_match_history(region, summoner, begin_index)
        match_ids = db_get_match_ids(region, summoner, begin_index, end_index)
        logger.warning('[find_match_ids] db_get_match_ids SECOND TRY got {}'.format(match_ids))
    return match_ids
def findbyid(bugid):
    conn = get_connection()
    if conn == "":  # no usable connection
        return None
    cursor = conn.cursor()
    sql = "select * from bug_record where flag=0 and bug_id=%s"
    try:
        cursor.execute(sql, bugid)
        bug = bugInfo()
        for i in cursor:
            bug.bug_id = i["bug_id"]
            bug.product = i["product"]
            bug.bugname = i["bugname"]
            bug.feedbacktime = i["feedbacktime"]
            bug.endtime = i["endtime"]
            bug.feedbackpeople = i["feedbackpeople"]
            bug.dealpeople = i["dealpeople"]
            bug.dealstate = i["dealstate"]
            bug.bugdetail = i["bugdetail"]
            bug.dealmethod = i["dealmethod"]
            bug.bugsystem = i["bugsystem"]
            bug.systemversion = i["systemversion"]
            bug.productsystem = i["productsystem"]
            bug.systemmodel = i["systemmodel"]
    finally:
        conn.close()
    return bug
def recipes():
    name01 = request.form['name']
    conn = get_connection()
    c = conn.cursor()
    if len(name01) > 0:
        result = c.execute(
            """
            SELECT r.*
            FROM ingredients AS i
            JOIN ingredients_to_recipes AS itr
            JOIN recipes AS r
                ON i.id = itr.id_ingredients
                AND itr.id_recipes = r.id
            WHERE i.name = ?""", (name01, ))
        recipes = result.fetchall()
        context = {'recipes': recipes}
        return render_template('result.html', **context)
    elif len(name01) == 0:
        result = c.execute('SELECT * FROM "ingredients" order by "name"')
        ingredients = result.fetchall()
        context = {'ingredients': ingredients}
        return render_template('select.html', **context)
def search(content):
    conn = get_connection()
    if conn == "":  # no usable connection
        return None
    cursor = conn.cursor()
    args = ['%' + content + '%']
    sql = "select * from bug_record where flag=0 and bugname like %s;"
    try:
        cursor.execute(sql, args)
        data = []
        for i in cursor:
            bug = bugInfo()
            bug.bug_id = i["bug_id"]
            bug.product = i["product"]
            bug.bugname = i["bugname"]
            bug.feedbacktime = i["feedbacktime"]
            bug.endtime = i["endtime"]
            bug.feedbackpeople = i["feedbackpeople"]
            bug.dealpeople = i["dealpeople"]
            bug.dealstate = i["dealstate"]
            bug.bugdetail = i["bugdetail"]
            bug.dealmethod = i["dealmethod"]
            bug.bugsystem = i["bugsystem"]
            bug.systemversion = i["systemversion"]
            bug.productsystem = i["productsystem"]
            bug.systemmodel = i["systemmodel"]
            data.append(bug)
    finally:
        conn.close()
    return data
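# find_all, findbyid, and search above copy the same fourteen columns by hand.
# A minimal consolidation sketch, assuming the bugInfo class and dict-style
# cursor rows used above; _row_to_bug_info is a hypothetical helper, not part
# of the original module.
def _row_to_bug_info(row):
    bug = bugInfo()
    for field in ("bug_id", "product", "bugname", "feedbacktime", "endtime",
                  "feedbackpeople", "dealpeople", "dealstate", "bugdetail",
                  "dealmethod", "bugsystem", "systemversion", "productsystem",
                  "systemmodel"):
        setattr(bug, field, row[field])
    return bug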
def get(self):
    with db.get_connection() as conn:
        user_id = self._extract_user_id()
        words_t = db.get_table('words')
        query = sa.select([words_t]).select_from(words_t).where(
            words_t.c.user_id == user_id)
        rows = conn.execute(query).fetchall()
        word_sum = 0
        known_words_cnt = 0
        correct = 0
        wrong = 0
        for row in rows:
            correct += row[words_t.c.correct_tested]
            wrong += row[words_t.c.wrong_tested]
            word_sum += 1
            right_cnt = row[words_t.c.correct_tested]
            cnt = row[words_t.c.correct_tested] + row[words_t.c.wrong_tested]
            # A word counts as "known" once more than 80% of its tests were correct.
            if cnt > 0 and right_cnt / cnt > 0.8:
                known_words_cnt += 1
        logging.debug(word_sum)
        logging.debug(known_words_cnt)
        logging.debug(f"{correct}, {wrong}")
        if correct + wrong == 0:
            percent = 0
        else:
            percent = correct / (correct + wrong)
        payload = {
            'result': 'ok',
            'data': {
                'all_words': word_sum,
                'known_words': known_words_cnt,
                'rating': int(known_words_cnt * percent * 100)
            }
        }
        logging.debug(payload)
        self.write(json.dumps(payload))
def api_pull_match_history(region, summoner, begin_index):
    database = get_connection(DATABASE_HOST, DATABASE_PORT, DATABASE_USERNAME,
                              DATABASE_PASSWORD, DATABASE_NAME)
    # Always try getting 15 matches (the API maximum) at a time.
    end_index = begin_index + 15
    # Fetch matches from the API.
    logger.warning('[api_pull_match_history] adding request for match history of {}'.format(summoner['id']))
    response = request(API_URL_MATCH_LIST, region, summonerId=summoner['id'],
                       beginIndex=begin_index, endIndex=end_index)
    if response:
        logger.warning('[api_pull_match_history] got {} matches: [{}]'.format(
            len(response.get('matches', [])),
            [str(match['matchId']) for match in response.get('matches', [])]))
        matches = response.get('matches', [])
        if matches:
            # See which matches we already have recorded.
            sql = u"""
                SELECT match_id
                FROM matches
                WHERE 1
                    AND match_region = {region}
                    AND summoner_id = {summoner_id}
                    AND match_id IN ({match_ids})
                    {missing_item_temp_fix_sql}
            """.format(
                region=database.escape(region),
                summoner_id=summoner['id'],
                match_ids=','.join(str(match['matchId']) for match in matches),
                missing_item_temp_fix_sql=MISSING_ITEM_TEMP_FIX_SQL,
            )
            recorded_match_ids = database.fetch_all_value(sql)
            logger.warning('[api_pull_match_history] sql: {}'.format(sql))
            logger.warning('[api_pull_match_history] recorded match ids: {}'.format(recorded_match_ids))
            match_stats = []
            for match in matches:
                # If the match is not already recorded and is in this season, record it.
                if match['matchId'] not in recorded_match_ids and match['season'] == SEASON_NAME:
                    logger.warning('[api_pull_match_history] getting stats for match {}'.format(match['matchId']))
                    thread = SimpleThread(match_helper.get_stats, matchId=match['matchId'],
                                          region=region, detailed=False)
                    match_stats.append(thread)
            if match_stats:
                match_stats = [stats.result() for stats in match_stats]
                logger.warning('[api_pull_match_history] doing player_helper.get_stats()')
                player_stats = player_helper.get_stats(match_stats, database)
                logger.warning('[api_pull_match_history] inserting player {}'.format(summoner['id']))
                player_helper.insert(player_stats, database, summoner['id'])
                for match_stat in match_stats:
                    try:
                        logger.warning('[api_pull_match_history] inserting match stats for match {}'.format(match_stat['match']['id']))
                        match_helper.insert(match_stat, player_stats, database, detailed=False)
                    except Exception as e:
                        logger.warning('[api_pull_match_history] FAILED inserting match stats for match {}: {}'.format(match_stat['match']['id'], e))
def done(phone):
    conn = db.get_connection()
    cursor = conn.cursor()
    # Bind the phone number as a parameter instead of interpolating it into
    # the SQL, which was an injection risk.
    cursor.execute("update smsing set processed=1 where phone=%s", (phone,))
    conn.commit()
    cursor.close()
    conn.close()
def get_client_lists_class():
    engine, metadata, session = db.get_connection()
    client_lists_table = Table(
        'SF_CARLYLE_CLIENT_LISTS', metadata,
        Column('client_id', String(64), nullable=False),
        Column('list_id', String(256), nullable=False),
        Column('list_name', String(256), nullable=True)
    )
    return client_lists_table
def get_users_class():
    engine, metadata, session = db.get_connection()
    users_table = Table(
        'SF_CARLYLE_USERS', metadata,
        Column('first_name', String(64), nullable=True),
        Column('last_name', String(64), nullable=True),
        Column('sf_id', String(18), nullable=False, primary_key=True),
        Column('email', String(256), nullable=True)
    )
    return users_table
def get_phones():
    conn = db.get_connection()
    phones = []
    cursor = conn.cursor()
    cursor.execute("SELECT phone from smsing where processed=0 limit 10")
    for (phone,) in cursor:
        phones.append(phone)
    cursor.close()
    conn.close()
    return phones
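# A minimal driver sketch tying the two smsing helpers above together: drain
# the unprocessed queue in batches of 10 and mark each number as handled.
# send_sms is a hypothetical stand-in for the actual delivery call, which is
# not part of this module.
def process_pending():
    while True:
        phones = get_phones()
        if not phones:
            break
        for phone in phones:
            send_sms(phone)  # hypothetical delivery function
            done(phone)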
def update_analytics_table(df):
    try:
        con = db.get_connection()
        c = con.cursor()
        df = df[['room_id', 'Date', 'Day', 'GroundTruth', 'SurveyPercentage',
                 'Capacity', 'Room', 'LogDate', 'MaxCount', 'AverageCount',
                 'MedianCount', 'ModeCount', 'Predictions', 'PredictedPercentage']]
        # DataFrame.as_matrix() was removed in pandas 1.0; to_numpy() is its replacement.
        numpy_matrix = df.to_numpy()
        for row in numpy_matrix:
            c.execute('INSERT OR REPLACE INTO analytics VALUES (?,?,?,?,?,?,?,?,?,?,?,?,?,?)', row)
        con.commit()
        print("Success! Table created or updated.")
    except Exception as e:
        print(e)
def main():
    # conn = sqlite3.connect(db_file)
    conn = db.get_connection()
    blog_c = db.get_cursor(conn)
    if len(sys.argv) > 2 and sys.argv[1] == "create":
        # Fetch the URL so that 1. we check it exists and works,
        # and 2. we get its title and link.
        if len(sys.argv) < 3:
            print("A url is needed when creating")
            return -1
        feed_url = sys.argv[2]
        feed_title, feed_link = getFeedInfo(feed_url)
        # %s placeholders to match the other queries here; the ? style was a
        # leftover from the old sqlite connection.
        blog_c.execute("INSERT INTO blogs(blog_url, blog_title) VALUES(%s, %s)",
                       (feed_url, feed_title))
        conn.commit()
        blog_c.close()
    elif len(sys.argv) > 1 and sys.argv[1] == "logs":
        print("Printing logs")
    elif len(sys.argv) > 1 and sys.argv[1] == "list":
        print("Listing feeds")
        blog_c.execute("SELECT feed_id, feed_url, feed_ttl, feed_title FROM feeds WHERE feed_id > 0;")
        blogs = blog_c.fetchall()
        blog_c.close()
        print(blogs)
    elif len(sys.argv) > 2 and sys.argv[1] == "fetch":
        print("Fetching feed")
        blog_id = int(sys.argv[2])
        blog_c.execute("SELECT feed_id, feed_url FROM feeds WHERE feed_id = %s;", (blog_id,))
        blog = blog_c.fetchall()[0]
        blog_c.close()
        print("Now fetching feed")
        entries = fetch_feed(blog[1], blog[0])
        print("Fetched feed, now storing")
        save_feed_entries(entries)
    else:
        blog_c.execute("SELECT blog_id, blog_url FROM blogs WHERE blog_id > 0;")
        blogs = blog_c.fetchall()
        blog_c.close()
        for blog in blogs:
            start = int(time())
            entries = fetch_feed(blog[1], blog[0])
            save_feed_entries(entries)
def get_feed_by_id(id):
    con = db.get_connection()
    c = con.cursor()
    c.execute("SELECT feed_id, feed_url FROM feeds WHERE feed_id=%s;", (id,))
    feed = c.fetchone()
    con.commit()  # Apparently recommended
    c.close()
    con.close()
    return feed
def get_feeds():
    con = db.get_connection()
    c = con.cursor()
    c.execute("SELECT feed_id, feed_url, feed_ttl FROM feeds;")
    feeds = c.fetchall()
    con.commit()  # Apparently recommended
    c.close()
    con.close()
    return feeds
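# get_feed_by_id and get_feeds repeat the same connect/commit/close dance. A
# minimal sketch of a context manager that factors it out, assuming the same
# db.get_connection() as above; feed_cursor is a hypothetical helper, not part
# of the original module.
from contextlib import contextmanager

@contextmanager
def feed_cursor():
    con = db.get_connection()
    c = con.cursor()
    try:
        yield c
        con.commit()
    finally:
        c.close()
        con.close()

# Usage, e.g. for get_feeds:
#     with feed_cursor() as c:
#         c.execute("SELECT feed_id, feed_url, feed_ttl FROM feeds;")
#         feeds = c.fetchall()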
def main():
    conn = db.get_connection()
    cursor = conn.cursor()
    for line in open('russianmaster.txt'):
        for email in get_emails(line):
            print(email)
            db.save_email(cursor, email, 'russianmaster', line)
        for phone in get_phones(line):
            print(phone)
            db.save_phone(cursor, phone, 'russianmaster', line)
    conn.commit()
    cursor.close()
    conn.close()
    print('done.')
def get_segment_users_class():
    engine, metadata, session = db.get_connection()
    segment_users_table = Table(
        'SF_CARLYLE_SEGMENT_USERS', metadata,
        Column('segid', Integer, nullable=False),
        Column('sf_id', String(18), nullable=False),
        # 35 identical custom-field columns, cf_1 .. cf_35.
        *[Column('cf_{}'.format(i), String(250), nullable=True) for i in range(1, 36)],
        Column('list', String(250), nullable=True),
        Column('role', String(250), nullable=True),
        Column('sync', String(20), nullable=True),
        Column('id', Integer, nullable=False, primary_key=True)
    )
    # Index('SYS_C00383283', segment_users_table.c.sf_carlyle_segment_id, segment_users_table.c.sf_id)
    return segment_users_table
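# A usage sketch for the three Table factories above, assuming
# db.get_connection() hands back a shared (engine, metadata, session) triple
# so every factory registers its Table on the same MetaData object.
# Hypothetical wiring, not part of the original module.
engine, metadata, session = db.get_connection()
users = get_users_class()
client_lists = get_client_lists_class()
segment_users = get_segment_users_class()
metadata.create_all(engine)  # creates only the tables that don't exist yet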
def get_summoner(region, summoner_name):
    database = get_connection(DATABASE_HOST, DATABASE_PORT, DATABASE_USERNAME,
                              DATABASE_PASSWORD, DATABASE_NAME)
    sql = u"""
        SELECT
            id,
            platform,
            IF(last_refresh_datetime < UTC_TIMESTAMP() - INTERVAL 20 MINUTE
               OR last_refresh_datetime IS NULL, 1, 0) as `can_refresh`
        FROM summoners
        WHERE 1
            AND region = {}
            AND searchable_name = {}
            AND last_update_datetime > UTC_TIMESTAMP() - INTERVAL 7 DAY
    """.format(
        database.escape(region),
        database.escape(summoner_name)
    )
    summoner = database.fetch_one_dict(sql)
    logger.warning(u'[get_summoner] summoner: {} with sql {}'.format(summoner, sql))
    if not summoner:
        try:
            logger.warning(u'[get_summoner] adding request for summoner {}'.format(summoner_name))
            response = request(API_URL_SUMMONER_SEARCH, region, summonerName=summoner_name)
            if response is None:
                return False
            data = response.get(summoner_name)
            summoner = {
                'id': data.get('id'),
                'platform': PLATFORM_IDS[region],
                'can_refresh': True,
            }
        except Exception as e:
            logger.warning(u'[get_summoner] Exception: {}'.format(e))
            return False
    return summoner
def api_pull_match(match_id, region):
    database = get_connection(DATABASE_HOST, DATABASE_PORT, DATABASE_USERNAME,
                              DATABASE_PASSWORD, DATABASE_NAME)
    # See if this match has already been recorded.
    sql = u"""
        SELECT match_id
        FROM matches
        WHERE 1
            AND match_region = {}
            AND match_id = {}
            AND details_pulled = 1
    """.format(
        database.escape(region),
        match_id,
    )
    logger.warning('[api_pull_match] sql: {}'.format(sql))
    recorded_match_ids = database.fetch_all_value(sql)
    # Don't record the match if it has already been recorded.
    if match_id not in recorded_match_ids:
        logger.warning('adding request for match {}'.format(match_id))
        match = request(API_URL_MATCH, region, matchId=match_id, includeTimeline=True)
        if match and 'timeline' in match:
            logger.warning('doing match_helper.get_stats()')
            match_stats = [match_helper.get_stats(match, detailed=True)]
            if match_stats:
                logger.warning('doing player_helper.get_stats()')
                player_stats = player_helper.get_stats(match_stats, database)
                logger.warning('inserting player stats')
                player_helper.insert(player_stats, database)
                for match_stat in match_stats:
                    try:
                        logger.warning('inserting match')
                        match_helper.insert(match_stat, player_stats, database, detailed=True)
                    except Exception:
                        pass
def main():
    template = open('email_template.html').read()
    conn = connect()
    dbconn = db.get_connection()
    i = 0
    while True:
        emails = get_emails(dbconn)
        # emails = [('*****@*****.**',)]
        for (email,) in emails:
            i += 1
            print('%s %s' % (i, email))
            try:
                sendEmail(email, template, conn)
                updateEmailStatus(dbconn, email, 1)
            except Exception as ex:
                updateEmailStatus(dbconn, email, -1)
                print(email, '>>>>>>>>>>>>>', ex)
        time.sleep(3)
def signup(message):
    """The first step in the three-legged OAuth handshake. You should
    navigate here first. It will redirect to login.uber.com.
    """
    # TODO: check for a token (match user object on slack ID).
    # If there is a token, check it works; if not, generate a new token as
    # below and put it in the database.
    client = db.get_connection()
    # JSON user record
    user_uber_data = client.yhackslackpack.users.find_one({"_id": "U03FQDYTM"})
    if user_uber_data["access_token"] != "":
        print(user_uber_data["access_token"])
        # Already authed, parse the message.
        return parse(request.args.get("message"))
    params = {
        'response_type': 'code',
        'redirect_uri': get_redirect_uri(request),
        'scope': ' '.join(config.get('scope')),
    }
    url = generate_oauth_service().get_authorize_url(**params)
    return redirect(url)
def GetRoomID(details):
    '''Get the room ID from the database. Details must be passed in the
    format [campus, building, room number, capacity], where capacity is the
    capacity of the room. Returns the room ID as an integer.'''
    con = db.get_connection()
    # con = lite.connect('wicount.sqlite3')
    c = con.cursor()
    try:
        c.execute("create table if not exists room(room_id INTEGER PRIMARY KEY, "
                  "campus VARCHAR(8), building VARCHAR(16), room VARCHAR(5), "
                  "capacity INTEGER);")
        con.commit()
    except OperationalError:
        print("Couldn't create the room table")
    try:
        sql_String = ("SELECT room_id FROM room WHERE campus = '" + details[0] +
                      "' AND building = '" + details[1] +
                      "' AND room = '" + details[2] + "';")
        c.execute(sql_String)
        room_ID = c.fetchone()
        if room_ID:
            room_ID = room_ID[0]
            if details[3] > 0:
                # Update the capacity if it was passed in.
                sql_String = ("UPDATE room SET capacity=" + str(details[3]) +
                              " WHERE room_id=" + str(room_ID) + ";")
                c.execute(sql_String)
        else:
            room = [details[0], details[1], details[2], details[3]]
            c.execute('INSERT INTO room (campus, building, room, capacity) VALUES (?, ?, ?, ?)', room)
            # sql_String still holds the SELECT above, so re-running it picks
            # up the id of the row just inserted.
            c.execute(sql_String)
            room_ID = c.fetchone()[0]
        con.commit()
    except OperationalError:
        print("Command skipped: ", sql_String)
    return room_ID
def run():
    in_queue = Queue()
    logger.debug("Spinning up {} threads".format(THREADS))
    for i in range(THREADS):
        Thread(target=thread_find_user, args=(in_queue,)).start()
    summoners_query = u'''
        SELECT id, region
        FROM summoners
        WHERE 1
            {}
            AND last_spider_datetime > UTC_TIMESTAMP() - INTERVAL 1 DAY
            OR last_spider_datetime is NULL
        ORDER BY RAND()
        LIMIT {}
    '''
    while not stopThreadEvent.is_set():
        logger.info("Finding Summoners...")
        database = get_connection(DATABASE_HOST, DATABASE_PORT, DATABASE_USERNAME,
                                  DATABASE_PASSWORD, DATABASE_NAME)
        summoners = []
        summoners += database.fetch_all_dict(summoners_query.format('', 20))
        summoners += database.fetch_all_dict(summoners_query.format("AND rank_tier = 'BRONZE'", 100))
        summoners += database.fetch_all_dict(summoners_query.format("AND rank_tier = 'MASTER'", 2))
        summoners += database.fetch_all_dict(summoners_query.format("AND rank_tier = 'CHALLENGER'", 2))
        for summoner in summoners:
            in_queue.put((summoner['region'], summoner['id']))
        logger.info("Queued...")
        while not in_queue.empty() and not stopThreadEvent.is_set():
            sleep(0.1)
def run():
    database = get_connection(DATABASE_HOST, DATABASE_PORT, DATABASE_USERNAME,
                              DATABASE_PASSWORD, DATABASE_NAME)
    insert_values = []
    response = request(API_URL_ITEMS, 'global')
    items_dict = response['data']
    version = response['version']
    for item in items_dict.values():
        insert_values.append(u"({}, {}, {})".format(
            item['id'],
            database.escape(item['name']),
            database.escape('http://ddragon.leagueoflegends.com/cdn/{}/img/item/{}'.format(
                version, item['image']['full'])),
        ))
    insert_query = u'''
        INSERT INTO items (id, name, image_icon_url)
        VALUES {}
    '''.format(u','.join(insert_values))
    database.execute('TRUNCATE TABLE items')
    database.execute(insert_query)
from queries import GetMessageSources

import jabber

source_handlers = {
    'jabber': jabber.run,
}


def run(connection):
    get_message_sources = GetMessageSources(connection)
    message_sources = get_message_sources()
    for source in message_sources:
        if source.type in source_handlers:
            source_handlers[source.type](source)
        else:
            raise Exception('unknown source type: ' + source.type)


if __name__ == '__main__':
    import logging
    logging.basicConfig(level=logging.DEBUG)
    from db import get_connection
    run(get_connection())
'''convert the excel sheet to a csv'''
data_xls = pd.read_excel(file, 'JustData', index_col=None)
data_xls.to_csv('survey.csv', encoding='utf-8')


def GetRoomNo(room):
    '''Format the room number so it is in the standard format B-002.'''
    if room != "":
        room = room.replace(".", "")
        room_no = room[:1] + "-" + room[1:]
    else:
        room_no = ""
    return room_no


# Open a connection to the database.
con = db.get_connection()
c = con.cursor()


def main():
    # -------------------------------------------------------
    # Set up variables.
    # -------------------------------------------------------
    dayList = ["Monday", "Tuesday", "Wednesday", "Thursday", "Friday"]
    timeList = ["9.00-10.00", "10.00-11.00", "11.00-12.00", "12.00-13.00",
                "13.00-14.00", "14.00-15.00", "15.00-16.00", "16.00-17.00"]
    fullDetails = []
    roomIDs = []
    occupancyDetails = []
    day = "Mon"  # initialise variable
    date = ""
def __init__(self):
    self.r = db.get_connection()
def setUp(self):
    db.reset_connection()
    db.set_database_type("postgres")
    self.assertNotEqual(db.connect_default(), None)
    self.assertNotEqual(db.get_connection(), None)