def get_all_players(self):
    """Fetch the full rankings list and cache it on the instance.

    Issues the pre-configured request over ``self._conn``, decodes the
    JSON payload, and stores the ``player_rankings`` list from the
    second ``rankings`` entry in ``self._rankings``.
    """
    config.connect(self._conn)
    response = self._conn.getresponse()
    payload = json.loads(response.read().decode("utf-8"))
    self._rankings = payload['rankings'][1]['player_rankings']
def saveDB(df_answers):
    """Persist *df_answers* into the KB table.

    Bug fix: the original called ``connect()`` three separate times, so
    the cursor, the ``commit()`` and ``to_sql`` each ran on a
    *different* connection — the commit was a no-op on a fresh
    connection.  A single connection is now used throughout.
    """
    print('Saving database')
    conn = connect()
    c = conn.cursor()
    # NOTE(review): to_sql(if_exists='replace') drops and recreates the
    # table anyway, so this explicit CREATE only matters for the window
    # before to_sql runs — confirm it is still wanted.
    c.execute('CREATE TABLE KB (title text, Category text, answers text, tags_filtered text)')
    conn.commit()
    df_answers.to_sql('KB', conn, if_exists='replace', index=False)
    return
def GetWeatherData(dateRange, metrics, time, function):
    """Query weather observations plus the matching station records.

    Parameters:
        dateRange: list of one date string (single day) or two date
            strings (inclusive range), e.g. ['2011-06-01', '2011-06-30'].
        metrics: column names to fetch from the 2011 weather table.
        time, function: accepted for interface compatibility; unused here.

    Returns:
        (df_data, df_stations) — observation rows and station rows
        (geom/id/lat/lon/status), with matching 'station_id' columns.
    """
    # Every query needs the station id and timestamp in front of the
    # requested metrics.  Work on a copy — the original insert()ed into
    # the caller's *metrics* list, mutating it as a side effect.
    columns = ['stationpoints_id', 'date_time'] + list(metrics)
    select_cols = ', '.join(str(c) for c in columns)

    if len(dateRange) == 1:
        # str(list)[1:-1] is the repr of the single element with its
        # quotes, e.g. "'2011-06-01'" — usable directly in the SQL.
        day = str(dateRange)[1:-1]
        start, end = day, day
    elif len(dateRange) == 2:
        start = "'" + dateRange[0] + "'"
        end = "'" + dateRange[1] + "'"

    # q fetches the weather data; q1 the stations (incl. GPS locations).
    # NOTE(review): values are interpolated into the SQL text — fine for
    # trusted callers, but parameterize if dateRange can be user input.
    q = ('SELECT ' + select_cols +
         ' FROM public.mesowest_weather_data_2011'
         ' WHERE date_time::date BETWEEN ' + start + ' AND ' + end)
    q1 = ('SELECT geom, objectid, latitude, longitude, status'
          ' FROM public.mesowest_stationpoints WHERE objectid IN'
          ' (SELECT stationpoints_id FROM public.mesowest_weather_data_2011'
          ' WHERE date_time::date BETWEEN ' + start + ' AND ' + end + ')')
    print(q1)

    # Connect to the database and run both queries.
    db_data = config.connect(q)
    db_stations = config.connect(q1)

    # Turn the result sets into dataframes.
    df_data = DataFrame.from_records(db_data, columns=columns)
    df_stations = DataFrame.from_records(
        db_stations,
        columns=['geom', 'station_id', 'latitude', 'longitude', 'status'])

    # Coerce the measurement columns (and station id) to numeric.
    # Bug fix: the original also ran to_numeric over 'date_time',
    # corrupting the timestamps before they were split below.
    for col in columns:
        if col != 'date_time':
            df_data[col] = pd.to_numeric(df_data[col], errors='coerce')

    # Split the timestamp into separate date and time columns.
    stamps = pd.DatetimeIndex(df_data['date_time'])
    df_data['date'], df_data['time'] = stamps.date, stamps.time
    del df_data['date_time']

    # Match column names across df_data and df_stations (needed for merge).
    df_data.rename(columns={'stationpoints_id': 'station_id'}, inplace=True)
    return df_data, df_stations
def main():
    """For each target table, fill in missing as_start/as_over by
    shifting the JP dates forward two years."""
    query_tpl = """SELECT id, jp_name AS name, jp_start, jp_over, as_start, as_over FROM `{target}` WHERE jp_name IS NOT NULL AND as_name IS NOT NULL AND jp_start IS NOT NULL AND as_start IS NULL"""
    update_tpl = "UPDATE `{target}` SET as_start = %s, as_over = %s WHERE (id = %s)"
    todos = [
        {'name': '白金轉蛋', 'target': 'Gasha'},
        {'name': '活動', 'target': 'Event'},
    ]
    connection = connect()
    with connection.cursor() as cursor:
        for todo in todos:
            print('開始處理' + todo['name'])
            cursor.execute(query_tpl.format(target=todo['target']))
            update_sql = update_tpl.format(target=todo['target'])
            for row in cursor.fetchall():
                new_start = row['jp_start'].replace(year=row['jp_start'].year + 2)
                if row['jp_over'] is not None:
                    new_over = row['jp_over'].replace(year=row['jp_over'].year + 2)
                else:
                    new_over = None
                print('Update as_start, as_over for', row['name'], new_start, new_over)
                cursor.execute(update_sql, (new_start, new_over, row['id']))
    connection.commit()
    connection.close()
def main():
    """Interactive CRUD menu; loops until the user picks option 5."""
    # Open the database connection and a dict-style cursor.
    con = config.connect()
    cursor = con.cursor(dictionary=True)
    choice = 0
    # Display the menu until the user enters 5 (exit).
    while choice <= 4:
        print("1. INSERT")
        print("2. UPDATE")
        print("3. DELETE")
        print("4. DISPLAY")
        print("5. EXIT")
        # Ask the user what to do next.
        choice = int(input("Ce doriti sa faceti? Introduceti cifra dorita:"))
        # Dispatch to the handlers defined elsewhere in this file.
        if choice == 1:
            insert(con, cursor)
        elif choice == 2:
            update(con, cursor)
        elif choice == 3:
            delete(con, cursor)
        elif choice == 4:
            display(cursor)
def main():
    """Copy every event row into the `Event` table.

    Runs through a DryCursor (no writes) unless --real is passed.
    """
    parser = argparse.ArgumentParser(description='Arguments for move_events.py')
    parser.add_argument('--real', action="store_true", default=False, help='real run')
    args = parser.parse_args()

    events = get_all_events()
    connection = connect()
    with connection.cursor() as cursor:
        insert_sql = """INSERT INTO `Event` (`jp_name`, `as_name`, `jp_start`, `jp_over`, `as_start`, `as_over`, `event_type`, `fake_id`, `event_subtype`, comment) VALUES(%s, %s, %s, %s, %s, %s, %s, %s, %s, %s)"""
        if not args.real:
            cursor = DryCursor(cursor)
        for ev in events:
            values = (ev['jp_name'], ev['as_name'], ev['jp_start'],
                      ev['jp_over'], ev['as_start'], ev['as_over'],
                      ev['event_type'], ev['id'], ev['event_subtype'],
                      ev['comment'])
            cursor.execute(insert_sql, values)
    connection.commit()
    connection.close()
def pushCorpus(links):
    """Insert the article text for each link into news_corpus.

    For each (article_id, link, ...) row: extract the article text,
    clean it, then push (text, word_count, article_id) into the
    news_corpus table.

    Returns the number of rows inserted.

    Fix: the summary message misspelled "inserted" ("intserted").
    """
    conn = config.connect()
    cursor = conn.cursor()
    pushedElms = 0
    for elm in links:
        link = elm[1]
        clean = cl.parseForCorpus(link)
        cursor.execute(
            "INSERT INTO news_corpus (article_text, word_count, article_id) VALUES (%s, %s, %s);",
            (clean[0], clean[1], elm[0]))
        pushedElms += 1
    conn.commit()
    print(str(pushedElms) + " new records inserted into news_corpus")
    cursor.close()
    conn.close()
    return pushedElms
def main():
    """Migrate legacy event-to-song rows into `EventToSong`.

    Resolves each legacy EID to the new Event id, then inserts the
    mappings (through a DryCursor unless --real is passed).  Unresolved
    EIDs are collected and printed at the end.

    Fix: rows whose EID could not be resolved never received an 'id'
    key, so the insert loop raised KeyError on them — they are now
    skipped (and already reported via *errors*).
    """
    parser = argparse.ArgumentParser(description='Arguments for move_event2card.py')
    parser.add_argument('--real', action="store_true", default=False, help='real run')
    args = parser.parse_args()

    rows = get_event2song_rows()
    errors = []
    connection = connect()
    with connection.cursor() as cursor:
        sql_get_event_id = """SELECT id FROM `Event` WHERE (event_type = %s AND fake_id = %s)"""
        sql_ins_e2s = """INSERT INTO `EventToSong` (`EID`, `SID`, `comment`) VALUES(%s, %s, %s)"""
        # First pass: resolve each legacy EID to the new Event id.
        for r in rows:
            cursor.execute(sql_get_event_id, (0, r['EID']))
            event_id = cursor.fetchall()
            if not event_id:
                errors.append(str(0) + ' ' + str(r['EID']) + ' Not Found')
                continue
            r['id'] = event_id[0]['id']
        if not args.real:
            cursor = DryCursor(cursor)
        # Second pass: insert only the rows that resolved.
        for r in rows:
            if 'id' not in r:
                continue
            cursor.execute(sql_ins_e2s, (r['id'], r['SID'], r['comment']))
    connection.commit()
    connection.close()
    for err in errors:
        print(err)
def get_event2song_rows():
    """Fetch all legacy event-to-song rows from PSTEventToSong.

    Returns the rows as a list of dicts with EID, SID and comment keys.

    Fix: the connection is now closed in a finally block so it is not
    leaked when the query raises.
    """
    sql_e2s = """SELECT `EID`, `SID`, `comment` FROM `PSTEventToSong`"""
    connection = connect()
    try:
        with connection.cursor() as cursor:
            cursor.execute(sql_e2s)
            rows = cursor.fetchall()
    finally:
        connection.close()
    return rows
def contacts_query():
    """Return each school's name with its contact person's name.

    Schools without a contact mentor are still included (RIGHT JOIN);
    rows are ordered by school name.
    Columns: schools.name, mentors.first_name, mentors.last_name.
    """
    query = """SELECT schools.name, mentors.first_name, mentors.last_name FROM mentors RIGHT JOIN schools ON mentors.id = schools.contact_person ORDER BY schools.name ASC;"""
    return config.connect(query)
def mentors_schools_query():
    """Return mentor names with the name and country of the school in
    the same city, ordered by mentor id.

    Columns: mentors.first_name, mentors.last_name, schools.name,
    schools.country.
    """
    query = """SELECT mentors.first_name, mentors.last_name, schools.name, schools.country FROM mentors INNER JOIN schools ON mentors.city = schools.city ORDER BY mentors.id ASC;"""
    return config.connect(query)
def mentors_by_country_query():
    """Return the number of mentors per country, ordered by country name.

    Columns: country, count.
    """
    query = """SELECT schools.country, count(mentors.id) FROM mentors JOIN schools ON mentors.city = schools.city GROUP BY country ORDER BY schools.country ASC;"""
    return config.connect(query)
def createTable(sql_commands, df):
    # Run the table-creation SQL for *df* and report the record count.
    conn = config.connect(config.dbconfig())
    dbcursor = conn.cursor()
    # NOTE(review): *conn*/*dbcursor* and the CSV buffer below are never
    # used after this point — a copy_from()/bulk-load step appears to be
    # missing (or was removed).  Confirm before deleting this dead code.
    f = io.StringIO()
    df.to_csv(f, sep='\t', header=False, index=False, na_rep='NULL')
    f.seek(0)
    print('total number of records: %s' % len(df))
    # The actual DDL is executed (and committed/closed) by execQuery.
    execQuery(sql_commands)
def all_schools_query():
    """Return mentor names with their city's school name and country,
    ordered by mentor id — including schools with no mentor yet
    (RIGHT JOIN keeps them).

    Columns: mentors.first_name, mentors.last_name, schools.name,
    schools.country.
    """
    query = """SELECT mentors.first_name, mentors.last_name, schools.name, schools.country FROM mentors RIGHT JOIN schools ON mentors.city = schools.city ORDER BY mentors.id ASC;"""
    return config.connect(query)
def applicants_query():
    """Return applicant first name + application code with the
    application's creation date.

    Joins applicants with applicants_mentors, keeps only applications
    created after 2016-01-01, ordered by creation_date *descending*.
    Columns: applicants.first_name, applicants.application_code,
    applicants_mentors.creation_date.

    Fix: the stated contract is descending order but the query said
    ASC — changed to DESC.
    """
    result = config.connect("""SELECT applicants.first_name, applicants.application_code, applicants_mentors.creation_date FROM applicants JOIN applicants_mentors ON applicants.id = applicants_mentors.applicant_id WHERE creation_date > '2016-01-01' ORDER BY applicants_mentors.creation_date DESC;""")
    return result
def sql_query(query_str):
    """Execute *query_str* and return all result rows as a list.

    The connection is always closed, even when execution fails.
    """
    con = connect()
    try:
        with con.cursor() as cursor:
            cursor.execute(query_str)
            return [row for row in cursor]
    finally:
        con.close()
def __init__(self):
    """Initialise name-format state, honouring Gramps config if present.

    Fix: the original declared ``global WITH_GRAMP_CONFIG`` (missing
    an 'S') while the code reads ``WITH_GRAMPS_CONFIG`` — the global
    statement named a different variable.
    """
    global WITH_GRAMPS_CONFIG
    global PAT_AS_SURN
    self.name_formats = {}
    if WITH_GRAMPS_CONFIG:
        self.default_format = config.get('preferences.name-format')
        if self.default_format == 0:
            self.default_format = Name.LNFN
            config.set('preferences.name-format', self.default_format)
        # If only one surname, see if pa/ma should be considered as
        # 'the' surname.
        PAT_AS_SURN = config.get('preferences.patronimic-surname')
        config.connect('preferences.patronimic-surname', self.change_pa_sur)
    else:
        self.default_format = Name.LNFN
        PAT_AS_SURN = False
    # Pre-init the name formats; this is updated with the data in the
    # database once a database is loaded.
    self.set_name_format(self.STANDARD_FORMATS)
def execQuery(sql_commands):
    """Execute each SQL command in sequence on a fresh connection.

    Database errors are printed rather than raised; the connection is
    committed and closed regardless of outcome (original behaviour —
    a best-effort batch runner).
    """
    connection = config.connect(config.dbconfig())
    cur = connection.cursor()
    try:
        for stmt in sql_commands:
            print(stmt)
            cur.execute(stmt)
    except (Exception, psycopg2.DatabaseError) as exc:
        print(exc)
    finally:
        connection.commit()
        connection.close()
def getCountyList():
    """Return County objects for every distinct county in covid2020,
    sorted by name.

    Fixes: close the cursor and connection when done (both leaked),
    and drop the pointless f-prefix on a placeholder-free string.
    """
    connection = config.connect()
    cursor = connection.cursor()
    try:
        cursor.execute("SELECT county FROM covid2020")
        dbData = cursor.fetchall()
    finally:
        cursor.close()
        connection.close()
    # dict.fromkeys de-duplicates while preserving first-seen order.
    removeDupe = list(dict.fromkeys(dbData))
    counties = [County(row[0]) for row in removeDupe]
    return sorted(counties, key=lambda c: c.name)
def uploadChanges():
    """Push boletas rows newer than the last synced id to MongoDB.

    Fix: the MySQL connection was never closed (only the cursor);
    close both.
    """
    last_remote_id = getLastRemoteId()
    conn = config.connect()
    mysqldb = conn.cursor()
    mysqldb.execute("SELECT * FROM boletas")
    result_set = mysqldb.fetchall()
    for row in result_set:
        local_id = row[0]
        # Only rows not yet mirrored remotely are uploaded.
        if int(local_id) > int(last_remote_id):
            mongocol.insert_one({"sync_id": local_id})
            print('record uploaded')
    print('Synced')
    mysqldb.close()
    conn.close()
def corpusAllArticles():
    """Push every matching team_articles record into the corpus.

    Selects (id, link, date) for rows matching the module-level
    newsTypeConditions filter and hands them to pushCorpus().
    Returns the number of rows pushCorpus() inserted.
    """
    conn = config.connect()
    cursor = conn.cursor()
    cursor.execute("SELECT id, link, date FROM team_articles WHERE %s" % newsTypeConditions)
    newsLinks = cursor.fetchall()
    cursor.close()
    conn.close()
    return pushCorpus(newsLinks)
def applicants_mentors_query():
    """Return applicant first name + code with the assigned mentor's
    name, ordered by applicant id.

    All applicants are included (LEFT JOINs), so the mentor columns
    are NULL when no mentor is assigned.
    Columns: applicants.first_name, applicants.application_code,
    mentors.first_name, mentors.last_name.
    """
    query = """SELECT applicants.first_name, applicants.application_code, mentors.first_name, mentors.last_name FROM ((applicants LEFT JOIN applicants_mentors ON applicants.id = applicants_mentors.applicant_id) LEFT JOIN mentors ON mentors.id = applicants_mentors.mentor_id) ORDER BY applicants.id ASC;"""
    return config.connect(query)
def teamQueryTexts(team):
    """Return (corpus id, link, team, article_text) rows for *team*.

    Fix: *team* was interpolated into the SQL string
    ("... team = '%s'" % team), which allows SQL injection and breaks
    on names containing quotes; it is now passed as a bound parameter.
    """
    conn = config.connect()
    cursor = conn.cursor()
    cursor.execute(
        "SELECT news_corpus.id, link, team, article_text FROM news_corpus INNER JOIN team_articles ON team_articles.id = news_corpus.article_id WHERE team = %s",
        (team,))
    elms = cursor.fetchall()
    cursor.close()
    conn.close()
    return elms
def dbSize():
    """Return human-readable sizes of the nflparse database and the
    news_corpus table, as a (db_size, corpus_size) tuple of strings."""
    conn = config.connect()
    cursor = conn.cursor()
    cursor.execute(" SELECT pg_size_pretty( pg_database_size('nflparse'))")
    database_size = cursor.fetchall()[0][0]
    cursor.execute(" SELECT pg_size_pretty( pg_total_relation_size('news_corpus'))")
    corpus_size = cursor.fetchall()[0][0]
    cursor.close()
    conn.close()
    return database_size, corpus_size
def get_event2card_rows():
    """Collect legacy event-to-card rows from every per-type table.

    Each table's index in *table_list* doubles as the event_type
    discriminator; None entries are type ids with no table.  The
    per-table SELECTs are UNION ALL'd and ordered by (event_type, id).

    Fixes: iterate with enumerate() instead of range(len()), and close
    the connection in a finally block so it is not leaked on error.
    """
    pre_sql_e2c = """SELECT `id`, `EID`, `CID`, `comment`, `type` AS card_type, {type} AS event_type FROM {table}"""
    table_list = ['PSTEventToCard', 'CollectEventToCard', 'AnniversaryToCard',
                  None, None, 'OtherEventToCard', None]
    sql_list = [pre_sql_e2c.format(table=table, type=event_type)
                for event_type, table in enumerate(table_list)
                if table is not None]
    sql_all_e2c = " UNION ALL ".join(sql_list) + " ORDER BY `event_type`, `id`"
    connection = connect()
    try:
        with connection.cursor() as cursor:
            cursor.execute(sql_all_e2c)
            rows = cursor.fetchall()
    finally:
        connection.close()
    return rows
def save_websheet(problem, definition):
    """Insert a websheet save record and emit a JSON success response.

    The nested done() helper commits, closes the DB handles, prints
    the JSON response, and terminates the process.
    """
    db = config.connect()
    cursor = db.cursor()

    def done(**response):
        cursor.close()
        db.commit()
        db.close()
        print(json.dumps(response))
        sys.exit(0)

    action = 'save'
    cursor.execute(
        "insert into ws_sheets (author, problem, definition, action, sharing) VALUES (%s, %s, %s, %s, %s)",
        ('leila', problem, definition, action, 'visible'))
    done(success=True, message=action + " of " + problem + " successful.")
def main():
    """Scrape each idol's image colour from moegirl and store it for
    idols that do not yet have one."""
    get_idols = "SELECT id, as_name FROM `Idol` WHERE (color IS NULL AND NOT type = 4)"
    set_color = "UPDATE `Idol` SET color = %s WHERE (id = %s)"
    connection = connect()
    with connection.cursor() as cursor:
        cursor.execute(get_idols)
        for idol in cursor.fetchall():
            name = idol['as_name']
            page_url = 'https://zh.moegirl.org.cn/' + name
            spans = bs4_data(page_url, string=re.compile('.*印象色是.*'))
            if not spans:
                continue
            style_attr = spans[0].nextSibling.find('span')["style"]
            # Take the value after ':' and drop the trailing character
            # (assumes "prop:value;" form — TODO confirm against page).
            color = style_attr.split(':')[1][:-1]
            print(name, color)
            cursor.execute(set_color, (color, idol['id']))
    connection.commit()
    connection.close()
def main():
    """Refresh card info for both the JP and overseas card dumps.

    Writes go through a DryCursor unless --real is passed; failures
    accumulate in the module-level *errors* list and are printed at
    the end.
    """
    global errors
    parser = argparse.ArgumentParser(description='Arguments for crawcards.py')
    parser.add_argument('--real', action="store_true", default=False, help='real run')
    args = parser.parse_args()

    idol_jp_names, idol_as_names = get_idol_names()
    with open('cards.json') as f:
        data = json.load(f)
    handle_data(data, idol_jp_names, 'jp')
    with open('ascards.json') as f:
        as_data = json.load(f)
    handle_data(as_data, idol_as_names, 'as')

    connection = config.connect()
    with connection.cursor() as cursor:
        if not args.real:
            cursor = DryCursor(cursor)

        def skipped(card):
            # Cards with these extra types at rarity 4 are not handled.
            return card['extraType'] in [5, 7, 10] and card['rarity'] == 4

        # Update (JP version) card info.
        for card in data:
            if skipped(card):
                continue
            handle_card(card, Database(cursor, is_jp=True), data, True)
        # Update (overseas version) card info.
        for card in as_data:
            if skipped(card):
                continue
            handle_card(card, Database(cursor, is_jp=False), data, False)
    connection.close()

    print('Errors:')
    for error in errors:
        print(error)
def main():
    """Backfill Card.card_id from the JP card dump, matching on name.

    Each card id is written for both the base name and the name with a
    trailing '+'.
    """
    idol_jp_names, idol_as_names = get_idol_names()
    with open('cards.json') as f:
        data = json.load(f)
    handle_data(data, idol_jp_names, 'jp')

    sql_set_cardid = "UPDATE `Card` SET card_id = %s WHERE (jp_name = %s)"
    connection = connect()
    with connection.cursor() as cursor:
        for card in data:
            print(card['name'])
            for suffix in ('', '+'):
                cursor.execute(sql_set_cardid, (card['id'], card['name'] + suffix))
    connection.commit()
    connection.close()
def main():
    """Attach every idol with id <= 52, except 田中琴葉, to group 3."""
    connection = connect()
    sql_select_idols = 'SELECT `id`, `jp_name`, `as_name` FROM `Idol` WHERE id <= 52'
    sql_insert_g2i = 'INSERT INTO `GroupToIdol`(`GID`, `IID`) VALUES(%s, %s)'
    with connection.cursor() as cursor:
        cursor.execute(sql_select_idols)
        for idol in cursor.fetchall():
            if idol['jp_name'] != '田中琴葉':
                cursor.execute(sql_insert_g2i, (3, idol['id']))
    connection.commit()
    connection.close()
plot.append(total) return plot if __name__ == "__main__": parser = argparse.ArgumentParser() parser.add_argument("--acstable", help="ACS Table") parser.add_argument("--lon", help="Longitude", type=float) parser.add_argument("--lat", help="Latitude", type=float) parser.add_argument("--level", help="Census geography level", default="tract") parser.add_argument("--start", type=int, default=1000) parser.add_argument("--end", type=int, default=50000) parser.add_argument("--width", type=int, default=1000) args = parser.parse_args() plots = [] with config.connect() as conn: for radius in range(args.start, args.end, args.width): plot = QueryRadial(conn=conn).query(acstable=args.acstable, radius_inner=radius, radius_outer=radius+args.width, level=args.level, lon=args.lon, lat=args.lat) plots.append(plot) # Ugly, dirty csv. Fix me. params = acs.ACSMeta.get(args.acstable).getchildren() writer = csv.writer(sys.stdout) writer.writerow(['Radial distribution query:']) writer.writerow(['lon',args.lon]) writer.writerow(['lat',args.lat]) writer.writerow(['geography',args.level]) writer.writerow(['width',args.width]) writer.writerow([]) writer.writerow(['']+[i.title for i in params]) for count, row in enumerate(plots):
#!/usr/bin/python3 import sys, json, config, re if __name__ == "__main__": db = config.connect() cursor = db.cursor() # should pass message which is a string def internal_error(message): cursor.close() db.commit() db.close() print(json.dumps(message)) sys.exit(0) # should pass response which is an object def done(**response): cursor.close() db.commit() db.close() print(json.dumps(response)) sys.exit(0) def owner(slug): cursor.execute( "select author, action from ws_sheets " + "WHERE problem = %s AND action != 'preview' ORDER BY ID DESC LIMIT 1;", [slug]) result = "false" for row in cursor: author = row[0]
def main():
    """Poll every activated station for its current track, update the
    stations table on change, and scrobble for active last.fm users.

    Fix: both last.fm request URLs contained "×tamp=" — the literal
    "&timestamp=" had been HTML-entity-mangled ("&times" rendered as
    ×) at some point, so last.fm never received the timestamp
    parameter.  Restored to "&timestamp=".
    """
    # Find stations to update.
    query = "SELECT * FROM stations WHERE activated = 1"
    query2 = []
    rows = []
    prevreq = []
    cursor.execute(query)
    rows = cursor.fetchall()
    for row in rows:
        # Get current-track information in whatever format the source uses.
        if row["sourceformat"] == "xml":
            track = getinfofromxml(row["sourceurl"])
        elif row["sourceformat"] == "json":
            track = getinfofromjson(row["sourceurl"])
        elif row["sourceformat"] == "tunein":
            track, prevreq = getinfofromshoutcastpage(row["sourceurl"], prevreq, row["name"])
        elif row["sourceformat"] == "idobi":
            track = getinfofromidobi(row["sourceurl"])
        else:
            track = dict({"title": "", "artist": "", "coverurl": "css/nocover.jpg"})
        # Normalise missing fields; encode the rest for the DB layer.
        # NOTE(review): .encode("utf-8") produces bytes — this reads as
        # Python 2 code; confirm before running under Python 3.
        if track["title"] is None:
            track["title"] = str("")
        else:
            track["title"] = track["title"].encode("utf-8")
        if track["artist"] is None:
            track["artist"] = str("")
        else:
            track["artist"] = track["artist"].encode("utf-8")
        if track["coverurl"] is None:
            track["coverurl"] = str("css/nocover.jpg")
        else:
            track["coverurl"] = track["coverurl"].encode("utf-8")
        if track["album"] is None:
            track["album"] = str("")
        else:
            track["album"] = track["album"].encode("utf-8")
        # I'm not stupid, this is the workaround to an even more complicated problem
        cnx2 = config.connect()
        cursor2 = cnx2.cursor(MySQLdb.cursors.DictCursor)
        cursor2.execute("SELECT lastfm_auth FROM users WHERE timeout>CURRENT_TIMESTAMP AND station=" + str(row["id"]))
        dat = cursor2.fetchall()
        cursor2.close()
        cnx2.close()
        if track["title"] != row["c_title"] and track["title"] != "":
            # Update db at change.  NOTE(review): values are escaped but
            # still concatenated into SQL text — parameterize if possible.
            query2.append("UPDATE stations SET c_title = '" + MySQLdb.escape_string(track["title"]) + "', c_artist='" + MySQLdb.escape_string(track["artist"]) + "', album='" + MySQLdb.escape_string(track["album"]) + "', coverurl='" + MySQLdb.escape_string(track["coverurl"]) + "' WHERE id = " + str(row["id"]) + "; ")
            # Scrobble the track that just finished (the previous row
            # values) for every active user.  The api_sig is the md5 of
            # all parameters concatenated, followed by the shared secret.
            for entry in dat:
                m = hashlib.md5()
                presig = "album" + row["album"] + "api_key" + config.api_key() + "artist" + row["c_artist"] + "methodtrack.scrobble" + "sk" + entry["lastfm_auth"] + "timestamp" + str(int(time.time())) + "track" + row["c_title"] + config.secret()
                m.update(presig)
                sig = m.hexdigest()
                url = "http://ws.audioscrobbler.com/2.0/?method=track.scrobble&album=" + row["album"] + "&api_key=" + config.api_key() + "&artist=" + row["c_artist"] + "&sk=" + entry["lastfm_auth"] + "&timestamp=" + str(int(time.time())) + "&track=" + row["c_title"] + "&api_sig=" + sig
                req = requests.post(url)
            print(row["c_title"] + " - " + row["c_artist"] + " - " + row["album"])
            # Send a now-playing request for the new track.
            for entry in dat:
                x = hashlib.md5()
                presigg = "album" + track["album"] + "api_key" + config.api_key() + "artist" + track["artist"] + "duration30methodtrack.updateNowPlaying" + "sk" + entry["lastfm_auth"] + "timestamp" + str(int(time.time())) + "track" + track["title"] + config.secret()
                x.update(presigg)
                sigg = x.hexdigest()
                url = "http://ws.audioscrobbler.com/2.0/?method=track.updateNowPlaying&album=" + track["album"] + "&api_key=" + config.api_key() + "&artist=" + track["artist"] + "&sk=" + entry["lastfm_auth"] + "&timestamp=" + str(int(time.time())) + "&track=" + track["title"] + "&api_sig=" + sigg + "&duration=30"
                rex = requests.post(url)
                if not rex.text.find('<lfm status="ok">'):
                    print(rex.text)
    # Apply the accumulated station updates in one batch.
    for q in query2:
        cursor.execute(q)
    cnx.commit()
#!/usr/bin/env python import MySQLdb import config import hashlib import re import requests import time import xml.etree.ElementTree as ET cnx = config.connect() cursor = cnx.cursor(MySQLdb.cursors.DictCursor) def getrequest(url, header): try: if header==1: return requests.get(url, headers={'X-Requested-With': 'XMLHttpRequest', 'Accept-Language': 'de-DE,en-US;q=0.7,en;q=0.3'}) elif header==2: return requests.get(url, headers={'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; WOW64; rv:45.0) Gecko/20100101 Firefox/45.0'}) else: return requests.get(url) except requests.ConnectionError: print("another attempt") return getrequest(url, header) def getinfofromxml(url): data=getrequest(url, 0) track = dict() try: inf = ET.fromstring(data.content) track["title"] = inf[2][0][1].text
import fut14 import time import os import config import datetime import pytz i=-1 successful_bids = [] fut = None backoff = 5 bought_something = False while not fut: try: fut = config.connect() except: print "Failed connecting, retrying in %d.." % backoff backoff += 5 time.sleep(backoff) print "Connected.." def isSellTime(): tz = pytz.timezone('Europe/London') if datetime.datetime.now(tz).hour in range (11, 22): return True return False def patched_tradepileDelete(trade_id): url = '{url}/{par}'.format(url=fut.urls['fut']['TradeInfo'], par=trade_id)