def create_quiz():
    db_connection = database.create_database_connection()
    try:
        body = request.get_json()
        user_id = extract_field_from_body('user_id', body)
        short_url = extract_field_from_body('short_url', body)
        question = extract_field_from_body('question', body)
        answers_target = extract_field_from_body('answers_target', body)
        quiz_id = database.insert_quiz(
            db_connection, user_id, short_url, question, answers_target)
    except Exception:
        return Response("Bad request.", status=400, mimetype='application/json')
    finally:
        database.close_connection(db_connection)
    return Response(json.dumps({"id": quiz_id}), status=201,
                    mimetype='application/json')

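# The handlers in this listing all use extract_field_from_body() to read
# required fields from the request JSON; its implementation is not part of
# this listing. A minimal sketch, assuming it simply raises when a field is
# missing (which is what makes the try/except blocks answer with a 400):
def extract_field_from_body(field, body):
    if body is None or field not in body:
        raise ValueError("Missing field: {}".format(field))
    return body[field]
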
def get_user():
    db_connection = database.create_database_connection()
    try:
        body = request.get_json()
        email = extract_field_from_body('email', body)
        user = database.get_user_by_email(db_connection, email)
        # Map the database row onto named fields
        data = {
            'id': user[0],
            'email': user[1],
            'password': user[2],
            'activated': user[3],
            'nickname': user[4],
            'created_date': user[5]
        }
    except Exception:
        return Response("Bad request.", status=400, mimetype='application/json')
    finally:
        database.close_connection(db_connection)
    # 200, not 201: nothing is created by this lookup
    return Response(json.dumps(data), status=200, mimetype='application/json')

def main():
    '''Entry point of the code.'''
    config = tables.config_from_xml_file(create.CONFIG_FILE)
    database_connection = database.create_database_connection(
        config['mysql_config'])
    database_cursor = database.create_database_cursor(database_connection)
    query_001(config, database_cursor)
    query_002(config, database_cursor)

def main():
    config = config_from_xml_file(CONFIG_FILE)

    # The existence check/creation must connect without a database selected,
    # so strip the database name from a copy of the MySQL config.
    mysql_config = copy.deepcopy(config['mysql_config'])
    del mysql_config['database']

    database_connection = create_database_connection(mysql_config)
    database_cursor = create_database_cursor(database_connection)
    if database_exists(config, database_cursor):
        print("'{}' database already exists.".format(config['database_name']))
    else:
        create_database(config, database_cursor)
    print()
    database_cursor.stop_cursor()
    database_connection.disconnect()

    # Reconnect with the database selected, then create and populate the tables.
    database_connection = create_database_connection(config['mysql_config'])
    database_cursor = create_database_cursor(database_connection)
    for table in config['tables'].keys():
        if table_exists(table, database_cursor):
            print("'{}' table already exists.".format(table))
        else:
            create_mysql_table(config, table, database_connection, database_cursor)
            print("'{}' table has been created.".format(table))
            print("Populating '{}' table with data......".format(table))
            load_data_into_mysql_table(config, table, database_connection, database_cursor)
            print("Population of '{}' table has been completed.".format(table))
        print()
    database_cursor.stop_cursor()
    database_connection.disconnect()

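# For illustration only: the shape of the config dict that main() above
# expects, inferred from the keys it accesses. The concrete values are
# placeholders, not values from the real XML config file.
EXAMPLE_CONFIG = {
    'database_name': 'example_db',         # placeholder name
    'mysql_config': {
        'host': 'localhost',               # assumed connection settings
        'user': 'example_user',
        'password': 'example_password',
        'database': 'example_db',          # removed for the existence check above
    },
    'tables': {
        'example_table': {},               # per-table settings come from the XML file
    },
}
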
def activate_user():
    db_connection = database.create_database_connection()
    try:
        body = request.get_json()
        token = extract_field_from_body('token', body)
        database.activate_user(db_connection, token)
    except Exception:
        return Response("Bad request.", status=400, mimetype='application/json')
    finally:
        database.close_connection(db_connection)
    return Response('User activated.', status=200, mimetype='application/json')

def get_quiz(id):
    db_connection = database.create_database_connection()
    try:
        body = request.get_json()
        user_id = extract_field_from_body('user_id', body)
        quiz = database.get_quiz(db_connection, id, user_id)
    except Exception:
        return Response("Bad request.", status=400, mimetype='application/json')
    finally:
        database.close_connection(db_connection)
    return Response(json.dumps(quiz), status=200, mimetype='application/json')

def create_activation_token():
    db_connection = database.create_database_connection()
    try:
        body = request.get_json()
        user_id = extract_field_from_body('user_id', body)
        token = extract_field_from_body('token', body)
        activation_token_id = database.insert_activation_token(
            db_connection, user_id, token)
    except Exception:
        return Response("Bad request.", status=400, mimetype='application/json')
    finally:
        database.close_connection(db_connection)
    return Response(json.dumps({"id": activation_token_id}), status=201,
                    mimetype='application/json')

def create_user():
    db_connection = database.create_database_connection()
    try:
        body = request.get_json()
        email = extract_field_from_body('email', body)
        password = extract_field_from_body('password', body)
        nickname = extract_field_from_body('nickname', body)
        user_id = database.insert_user(
            db_connection, email, password, nickname)
    except Exception:
        return Response("Bad request.", status=400, mimetype='application/json')
    finally:
        database.close_connection(db_connection)
    return Response(json.dumps({"id": user_id}), status=201,
                    mimetype='application/json')

def answer_quiz(quiz_id):
    db_connection = database.create_database_connection()
    try:
        body = request.get_json()
        user_id = extract_field_from_body('user_id', body)
        option_id = extract_field_from_body('option_id', body)
        database.answer(db_connection, quiz_id, user_id, option_id)
        database.result(db_connection, quiz_id)
    except Exception:
        return Response("Bad request.", status=400, mimetype='application/json')
    finally:
        database.close_connection(db_connection)
    return Response('Answer added.', status=201, mimetype='application/json')

def create_options():
    db_connection = database.create_database_connection()
    try:
        body = request.get_json()
        texts = extract_field_from_body('texts', body)
        quiz_id = extract_field_from_body('quiz_id', body)
        for text in texts:
            database.insert_option(db_connection, quiz_id, text)
    except Exception:
        return Response("Bad request.", status=400, mimetype='application/json')
    finally:
        database.close_connection(db_connection)
    return Response(json.dumps({}), status=201, mimetype='application/json')

def crawl(autocrawl=False, sleeptime=5, max=None, nodb=False, headless=False,
          queue=None, blacklist=None, proxies=None, timeout=3):
    # Avoid shared mutable default arguments
    urls = queue if queue is not None else []
    blacklist = blacklist if blacklist is not None else set()
    proxies = proxies if proxies is not None else []

    if not nodb:
        # Additional imports
        from database import create_database_connection, upload_set

        # Establish database connection
        try:
            conn = create_database_connection()
        except Exception as e:
            print(e)
            sys.exit()

    # Scrape tracks
    driver = None
    urls_scraped = set()
    error_count = 0
    proxy_index = 0

    while len(urls) > 0:
        num_current = len(urls_scraped) + 1
        num_overall = str(len(urls) + len(urls_scraped)) + ("+" if autocrawl else "")

        # Create browser, rotating to the next proxy every 10 urls
        if len(proxies) > 0:
            if num_current % 10 == 0 or driver is None:
                proxy_index += 1
                if driver is not None:
                    driver.quit()
                    driver = None
                if proxy_index >= len(proxies):
                    proxy_index = 0
                # Drop the proxy from the pool if it does not work
                try:
                    driver = create_browser(headless=headless,
                                            proxy=proxies[proxy_index],
                                            timeout=timeout)
                except BadProxyException:
                    proxies.remove(proxies[proxy_index])
                    continue
        elif driver is None:
            driver = create_browser(headless=headless, timeout=timeout)

        # Stop once the maximum number of sets has been scraped
        if max is not None and max < num_current:
            break

        print("Scraping url %s of %s" % (num_current, num_overall))
        url = urls[0]
        if url == "" or url is None:
            # Drop empty entries instead of looping on them forever
            urls.pop(0)
            continue
        if url in blacklist:
            print("Url was found in the blacklist, skipping")
            urls_scraped.add(urls.pop(0))
            continue

        print("Scraping %s:" % url)
        try:
            html = get_page_source(driver, url, autocrawl=autocrawl)
        except Exception as e:
            if error_count >= 3:
                print("Couldn't receive page source. Error %s, skipping" % e)
                urls_scraped.add(urls.pop(0))
                error_count = 0
            else:
                print("Couldn't receive page source, retrying")
                error_count += 1
            continue

        try:
            setlist = scrape_set(html, url)
        except Exception as e:
            if error_count >= 3:
                print("Couldn't scrape set because of %s, skipping" % e)
                urls_scraped.add(urls.pop(0))
                error_count = 0
            else:
                print("Couldn't scrape set, retrying")
                error_count += 1
            continue

        # Save the scraped information
        if not nodb:
            upload_set(conn, setlist)
        else:
            # Without a database, append the set to a local JSON file
            try:
                with open("output.json", "r+") as f:
                    data = json.load(f)
            except Exception:
                data = []
            data.append(setlist)
            with open("output.json", "w+") as f:
                f.write(json.dumps(data))

        # Add newly discovered links to the queue
        if autocrawl:
            def enqueue(link, label):
                if link and link not in urls and link not in urls_scraped and link not in blacklist:
                    urls.append(link)
                    print("Added %s setlist %s to queue." % (label, link))

            enqueue(setlist["previous_set"], "previous")
            enqueue(setlist["next_set"], "next")
            for link in setlist["artist_links"]:
                enqueue(link, "artist")
            for link in setlist["related_links"]:
                enqueue(link, "related")

        # Move url to already scraped
        urls_scraped.add(urls.pop(0))
        time.sleep(sleeptime)

    # Close the browser
    print("Scraped %s sets" % len(urls_scraped))
    if driver is not None:
        driver.quit()

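# Illustrative invocation of crawl() as defined above; the start URL and the
# limits are placeholders, not values taken from the project.
if __name__ == "__main__":
    crawl(queue=["https://example.com/some-setlist"],
          autocrawl=True, max=50, nodb=True, headless=True)
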
import time

from database import create_database_connection
from database.helpers import fetch_all
from database.spotify_song import create_spotify_song
from functions.spotify import fetch_song_general

if __name__ == "__main__":
    conn = create_database_connection()
    # Pick up to 500 random songs that do not have Spotify metadata yet
    SQL = ("SELECT songs.id, songs.title, artists.name "
           "FROM songs "
           "JOIN artists ON songs.artist_id = artists.id "
           "WHERE NOT EXISTS ("
           "  SELECT * FROM Spotify_Songs WHERE Spotify_Songs.song_id = songs.id"
           ") "
           "GROUP BY songs.id, artists.name "
           "ORDER BY random() "
           "LIMIT 500")
    records = fetch_all(conn, SQL)
    for song in records:
        try:
            metadata = fetch_song_general(song[1], song[2])
            create_spotify_song(conn, song[0], metadata)
        except Exception as e:
            # Roll back the failed transaction so later inserts can proceed
            conn.rollback()
            print(e)
        # Throttle requests to the Spotify API
        time.sleep(2)

    if query_result is None:
        raise Exception("Error in getting mutation list")

    all_del = []
    curr_del = []
    for one_result in query_result.fetchall():
        # Group rows with consecutive reference indices into one deletion
        if len(curr_del) == 0 or one_result["ref_idx"] == curr_del[-1]["ref_idx"] + 1:
            curr_del.append(one_result)
        else:
            del_str = ""
            for one_del in curr_del:
                del_str += reference_genome[one_del["ref_idx"]]
            all_del.append({
                "ref_idx": curr_del[0]["ref_idx"],
                "del_str": del_str
            })
            # Start the next run with the current row instead of dropping it
            curr_del = [one_result]
    # Flush the final run as well
    if curr_del:
        del_str = ""
        for one_del in curr_del:
            del_str += reference_genome[one_del["ref_idx"]]
        all_del.append({
            "ref_idx": curr_del[0]["ref_idx"],
            "del_str": del_str
        })
    return all_del


if __name__ == "__main__":
    dataset = 'ugrad'
    global db, reference_genome
    db = database.create_database_connection(database=dataset)

    # Save the mutation list into the answer file format
    with open("dataset/%s/reference_genome.pickle" % dataset, "rb") as f:
        reference_genome = pickle.load(f)
    save_mutation_to_file()