def get_self_challenge(user_id):
    """Return every self_challenge row for *user_id* as a list of dicts.

    On a database error the exception object itself is returned instead of
    a list (existing contract, preserved here — callers must check).
    """
    conn = db_connection.get_connection()
    cursor = conn.cursor()
    result_dict = {}
    try:
        cursor.execute("SELECT * FROM self_challenge WHERE id=%s", (user_id, ))
        rows = cursor.fetchall()
        res = []
        try:
            for row in rows:
                result_dict = {
                    'c_id': row[0],
                    'id': row[1],
                    'descripition': row[2],  # key spelling kept: callers depend on it
                    'imageurl': row[3],
                    'eventname': row[4],
                    'steps': row[5],
                    'calories': row[6],
                }
                res.append(result_dict)
        except IndexError as e:
            # Malformed row: record the error (note: not appended to res)
            result_dict = {'error': e}
    except (Exception, psycopg2.Error) as error:
        print(error)
        return error
    return res
def get_gps_points(user_id, start_ts, end_ts):
    """Fetch GPS fixes for *user_id* with time between the two timestamps.

    Returns {'status': <str>, 'data': <dict>}; 'data' holds the LAST matching
    row only (each row overwrites the previous one — presumably one row is
    expected per range; TODO confirm with callers).
    """
    conn = db_connection.get_connection()
    cursor = conn.cursor()
    result = []  # FIX: pre-bind; previously unbound -> NameError when the query failed
    try:
        SQL_QUERY = "SELECT * FROM gps_location WHERE time BETWEEN %s AND %s AND id=%s"
        value = (start_ts, end_ts, user_id)
        cursor.execute(SQL_QUERY, value)
        result = cursor.fetchall()
        if cursor.rowcount > 0:
            status = "success"
        else:
            status = "No result"
    except (Exception, psycopg2.Error) as error:
        print(error)
        status = "Failed"
    result_dict = {}
    try:
        for row in result:
            result_dict = {
                'id': row[0],
                'gps_long': row[1],
                'gps_lat': row[2],
                # NOTE(review): time is read from row[4], but the insert in this
                # file writes only 4 columns (indices 0-3) — confirm the actual
                # table column order; kept as-is to preserve behavior.
                'time': datetime.datetime.utcfromtimestamp(
                    row[4]).strftime('%Y-%m-%dT%H:%M:%SZ')
            }
    except IndexError as e:
        result_dict = {'error': e}
    res = {'status': status, 'data': result_dict}
    return res
# NOTE(review): legacy Python 2 CGI helper (print statements).  This block is
# truncated in the file — the final triple-quoted string is never closed.
# NOTE(review): the SQL is built by concatenating `id` directly — SQL
# injection risk; should use bound parameters when this block is restored.
def print_navbar(id, curr_page):
    # `id` shadows the builtin; expected to be the student's ID as a string
    sql_query = 'SELECT * FROM Student WHERE StudentID='+id
    cursor = db_connection.get_connection()  # used directly as a cursor here
    cursor.execute(sql_query)
    record = cursor.fetchone()
    name = record['FirstName']+' '+record['LastName']
    # Emit the Bootstrap navbar header; the brand slot shows the student's name
    print """\n <nav class="navbar navbar-default" role="navigation"><div class="container-fluid"> <div class="navbar-header"> <button type="button" class="navbar-toggle collapsed" data-toggle="collapse" data-target="#navbar" aria-expanded="false" aria-controls="navbar"> <span class="sr-only">Toggle navigation</span><span class="icon-bar"></span> <span class="icon-bar"></span><span class="icon-bar"></span> </button> <a class="navbar-brand" href="#">"""
    print name
    print """\n</a> </div><div id="navbar" class="navbar-collapse collapse"> <ul class="nav navbar-nav"> """
    # Student navigation entries
    print '<li><a href="user_page.py">Details</a></li>'
    print '<li><a href="progress_page.py">Progress</a></li>'
    print '<li><a href="task_list.py">Task List</a></li>'
    print '<li><a href="playground.py">Playground</a></li>'
    print '<li><a href="league_table.py">League Table</a></li>'
    # Truncated here in the source (unterminated string literal)
    print """\n
def save_to_db():
    """Persist image metadata, hash codes and feature vectors for every batch
    produced by generate_batch() into img_info / hash_code / feature.

    Commits once per batch.  Relies on the module-level generate_batch,
    generate_feature_vector_and_hash, get_connection and get_name_list.
    """
    size = 100
    img_batches = generate_batch(size)
    for batch in img_batches:
        (img_input, img_labels, img_urls) = batch
        features, hashcodes = generate_feature_vector_and_hash(img_input)
        conn1 = get_connection()
        cursor1 = conn1.cursor()
        name_list = get_name_list()
        for i in range(len(img_urls)):
            # FIX: ndarray.tostring() is a deprecated alias of tobytes();
            # output bytes are identical.
            feature = features[i].tobytes()
            hashcode = hashcodes[i]
            label_id = int(img_labels[i])
            # The numeric image id is embedded in the file name: image_<id>.jpg
            img_id = re.search(r"image_(.*).jpg", img_urls[i]).group(1)
            cursor1.execute(
                "INSERT INTO img_info ( img_id, class, class_name, img_url) VALUES(%s, %s, %s, %s);",
                [
                    img_id, label_id, name_list[label_id].encode("utf-8"),
                    img_urls[i]
                ])
            cursor1.execute(
                "INSERT INTO hash_code ( img_id, hash_code) VALUES(%s, %s);",
                [img_id, str(hashcode)])
            cursor1.execute(
                "INSERT INTO feature ( img_id, feature) VALUES(%s, %s);",
                [img_id, feature])
        conn1.commit()
def get_calories_info(user_id, date):
    """Return the calories row for (user_id, date).

    Returns {'status': <str>, 'data': <dict>}; 'data' holds the last matching
    row (rows overwrite each other — presumably (id, date) is unique; TODO
    confirm against the schema).
    """
    conn = db_connection.get_connection()
    cursor = conn.cursor()
    result = []  # FIX: pre-bind; previously unbound -> NameError when the query failed
    try:
        SQL_QUERY = "SELECT * FROM calories WHERE id=%s AND date=%s"
        value = (user_id, date)
        cursor.execute(SQL_QUERY, value)
        result = cursor.fetchall()
        if cursor.rowcount > 0:
            status = "success"
        else:
            status = "No result"
    except (Exception, psycopg2.Error) as error:
        print(error)
        status = "Failed"
    result_dict = {}
    try:
        for row in result:
            result_dict = {
                'id': row[0],
                'date': str(row[1]),
                'calories_burnt': row[2],
                'calories_consumed': row[3]
            }
    except IndexError as e:
        result_dict = {'error': e}
    res = {'status': status, 'data': result_dict}
    return res
def get_all_games(user_id):
    """Return every games row owned by *user_id* as a list of dicts.

    Raises Exception on any database or row-shape error (existing contract:
    callers catch plain Exception).
    """
    conn = db_connection.get_connection()
    cursor = conn.cursor()
    try:
        SQL_QUERY = "SELECT * FROM games WHERE uid=%s"
        cursor.execute(SQL_QUERY, (user_id, ))
        result = cursor.fetchall()
        result_dict = {}
        res = []
        if len(result) > 0:
            try:
                for row in result:
                    result_dict = {
                        "gid": row[0],
                        "uid": row[8],
                        "location_lat": row[1],
                        "location_long": row[2],
                        "pdate": str(row[3]),
                        "stime": str(row[4]),
                        "etime": str(row[5]),
                        "level": row[6],
                        "participants": row[7],
                        "address": row[9],
                        "gname": row[10]
                    }
                    res.append(result_dict)
                print(res)  # leftover debug output, preserved
            except IndexError as e:
                # FIX: chain the cause instead of a bare, message-less raise
                raise Exception("malformed games row") from e
    except (Exception, psycopg2.Error) as error:
        # FIX: preserve the original error text/cause for the caller
        raise Exception(str(error)) from error
    return res
def get_steps_info(user_id, date):
    """Return the steps count for (user_id, date).

    Returns {'status': <str>, 'data': {'steps': n}}.
    NOTE(review): a three-argument get_steps_info(user_id, date_start,
    date_end) is defined later in this file and will shadow this definition
    at import time — confirm which one callers expect.
    """
    conn = db_connection.get_connection()
    cursor = conn.cursor()
    result = []  # FIX: pre-bind; previously unbound -> NameError when the query failed
    try:
        SQL_QUERY = "SELECT steps FROM steps WHERE id=%s AND date=%s"
        value = (user_id, date)
        cursor.execute(SQL_QUERY, value)
        result = cursor.fetchall()
        if cursor.rowcount > 0:
            status = "Success"
        else:
            status = "No result"
    except (Exception, psycopg2.Error) as error:
        print(error)
        status = "Failed"
    result_dict = {}
    try:
        for row in result:
            result_dict = {'steps': row[0]}
    except IndexError as e:
        result_dict = {'error': e}
    res = {"status": status, 'data': result_dict}
    return res
def get_registration_deatils(user_id):
    """Fetch the registration_detail row for a user.

    (Function-name typo kept — callers reference it as-is.)
    Returns {'status': <str>, 'data': <dict>} with the last matching row.
    """
    conn = db_connection.get_connection()
    cursor = conn.cursor()
    result = []  # FIX: pre-bind; previously unbound -> NameError when the query failed
    try:
        SQL_QUERY = "SELECT * FROM registration_detail WHERE id=%s"
        cursor.execute(SQL_QUERY, (user_id, ))
        result = cursor.fetchall()
        if cursor.rowcount > 0:
            status = "success"
        else:
            status = "No result"
    except (Exception, psycopg2.Error) as error:
        print(error)
        status = "Failed"
    result_dict = {}
    try:
        for row in result:
            result_dict = {
                'id': row[0],
                'name': row[1],
                'age': row[2],
                'height': row[3],
                'weight': row[4],
                'bmi': row[5]
            }
    except Exception as identifier:
        # Best-effort: log and fall through with whatever was parsed so far
        print(identifier)
    res = {"status": status, 'data': result_dict}
    return res
def set_submit_as_processed_in_database(self, submit_id):
    """Mark *submit_id* as processed via the DB-side function."""
    connection = db_connection.get_connection()
    connection.autocommit = True
    cursor = connection.cursor()
    cursor.execute('SELECT set_submit_as_processed(%s);', (submit_id, ))
    connection.close()
def get_steps_info(user_id, date_start, date_end):
    """Return step rows for a user in [date_start, date_end].

    NOTE(review): this shadows the earlier two-argument get_steps_info
    defined above in the same file.
    Returns {'status': <str>, 'data': {'day0': v0, 'day1': v1, ...}}.
    """
    conn = db_connection.get_connection()
    cursor = conn.cursor()
    result = []  # FIX: pre-bind; previously unbound -> NameError when the query failed
    try:
        SQL_QUERY = "SELECT steps FROM steps WHERE time BETWEEN %s AND %s AND id=%s"
        value = (date_start, date_end, user_id)
        cursor.execute(SQL_QUERY, value)
        result = cursor.fetchall()
        if cursor.rowcount > 0:
            status = "success"
        else:
            status = "No result"
    except (Exception, psycopg2.Error) as error:
        print(error)
        status = "Failed"
    # Flatten rows into a dict; 'count' restarts per row, so later rows
    # overwrite earlier ones under the same 'dayN' keys (existing behavior).
    result_dict = {}
    try:
        for row in result:
            for count, day in enumerate(row):
                result_dict['day' + str(count)] = day
    except IndexError as e:
        result_dict = {'error': e}
    res = {"status": status, 'data': result_dict}
    return res
def get_user_information(user_id):
    """Fetch the user_personal_info row for a user.

    Returns {'status': <str>, 'data': <dict>} with the last matching row.
    """
    result = []  # FIX: pre-bind; previously unbound -> NameError when the query failed
    try:
        SQL_QUERY = "SELECT * FROM user_personal_info WHERE id=%s"
        conn = db_connection.get_connection()
        cursor = conn.cursor()
        cursor.execute(SQL_QUERY, (user_id, ))
        result = cursor.fetchall()
        print(result)  # leftover debug output, preserved
        if cursor.rowcount > 0:
            status = "success"
        else:
            status = "No result"
    except (Exception, psycopg2.Error) as error:
        print(error)
        status = "Failed"
    result_dict = {}
    try:
        for row in result:
            result_dict = {
                'id': row[0],
                'fullname': row[1],
                'firstname': row[2],
                'lastname': row[3],
                'photourl': row[4]
            }
    except IndexError as e:
        result_dict = {'error': e}
    res = {'status': status, 'data': result_dict}
    return res
def save_to_db():
    """Persist transfer-learning hash codes batch-by-batch.

    Skips the first 38 batches (ids 0..3799) — presumably already processed
    in a previous run; TODO confirm the magic number 37.
    NOTE(review): duplicates the name of an earlier save_to_db() in this
    file; the later definition shadows the earlier one.
    """
    size = 100
    img_batches = generate_stochastic_train_batch(size)
    img_id = 0
    batch_id = 0
    for batch in img_batches:
        (img_input, img_labels) = batch
        if (batch_id <= 37):
            # Resume point: advance counters without re-inserting
            batch_id += 1
            img_id += 100
            continue
        features, hashcodes = generate_feature_vector_and_hash(img_input)
        conn1 = get_connection()
        cursor1 = conn1.cursor()
        for i in range(len(img_labels)):
            # FIX: ndarray.tostring() is a deprecated alias of tobytes();
            # output bytes are identical.
            feature = features[i].tobytes()
            hashcode = hashcodes[i]
            label_id = int(img_labels[i])
            print(img_id)  # progress/debug output, preserved
            cursor1.execute(
                "INSERT INTO transfer_img_info ( img_id, class) VALUES(%s, %s);",
                [img_id, label_id])
            cursor1.execute(
                "INSERT INTO transfer_hash_code ( img_id, hash_code) VALUES(%s, %s);",
                [img_id, str(hashcode)])
            img_id += 1
        conn1.commit()
# NOTE(review): legacy Python 2 CGI helper (print statements).  This block is
# truncated in the file — the final triple-quoted string is never closed.
# NOTE(review): SQL built by concatenating `id` — injection risk; should use
# bound parameters when this block is restored.
def print_navbar_teacher(id, curr_page):
    # `id` shadows the builtin; expected to be the teacher's ID as a string
    sql_query = 'SELECT * FROM Teacher WHERE TeacherID='+id
    cursor = db_connection.get_connection()  # used directly as a cursor here
    cursor.execute(sql_query)
    record = cursor.fetchone()
    name = record['FirstName']+' '+record['LastName']
    # Emit the Bootstrap navbar header; the brand slot shows the teacher's name
    print """\n <nav class="navbar navbar-default" role="navigation"><div class="container-fluid"> <div class="navbar-header"> <button type="button" class="navbar-toggle collapsed" data-toggle="collapse" data-target="#navbar" aria-expanded="false" aria-controls="navbar"> <span class="sr-only">Toggle navigation</span><span class="icon-bar"></span> <span class="icon-bar"></span><span class="icon-bar"></span> </button> <a class="navbar-brand" href="#">"""
    print name
    print """\n</a> </div><div id="navbar" class="navbar-collapse collapse"> <ul class="nav navbar-nav"> """
    # Teacher navigation entries
    print '<li><a href="/teacher/user_page.py">Details</a></li>'
    print '<li><a href="/teacher/class_results.py">Class Administration</a></li>'
    print '<li><a href="/teacher/task_admin.py">Task Administration</a></li>'
    print '<li><a href="/teacher/task_creator.py">Task Creator</a></li>'
    print '<li><a href="/playground.py">Playground</a></li>'
    # Extra entry for site administrators only
    if(record['Administrator']==1):
        print '<li><a href="/teacher/site_admin.py">Site Administration</a></li>'
    # Truncated here in the source (unterminated string literal)
    print """\n
def get_tutorial(tutorial_id):
    """Return the TutorialText for *tutorial_id*."""
    cursor = db_connection.get_connection()
    # FIX: bound parameter instead of string concatenation (SQL injection risk)
    cursor.execute('SELECT TutorialText FROM Tutorial WHERE TutorialID=%s',
                   (tutorial_id, ))
    tutorial = cursor.fetchone()
    return tutorial['TutorialText']
def increment_problem_number_of_accepted_solutions(self, problem_id, submit_id):
    """Bump the accepted-solutions counter via the stored procedure."""
    connection = db_connection.get_connection()
    connection.autocommit = True
    cursor = connection.cursor()
    cursor.callproc('increment_problem_number_of_accepted_solutions',
                    (problem_id, submit_id))
    connection.close()
def remark_time(min_time, times):
    """Re-score Time_Points for every completed progress row.

    :param min_time: reference minimum time passed to judge_time
    :param times: iterable of dict-like rows with DateStarted, DateCompleted
                  and ProgressID
    """
    cursor = db_connection.get_connection()
    for time in times:
        if(time['DateCompleted'] is not None):
            # .seconds (not total_seconds): existing behavior, assumes the
            # delta is under one day — TODO confirm
            delta = (time['DateCompleted'] - time['DateStarted']).seconds
            new_score = judge_time(min_time, delta)
            # FIX: bound parameters instead of %-string interpolation
            cursor.execute('UPDATE Progress SET Time_Points=%s WHERE ProgressID=%s',
                           (new_score, time['ProgressID']))
def is_present(user_id):
    """Return True iff gps_location has at least one row for *user_id*."""
    connection = db_connection.get_connection()
    cur = connection.cursor()
    cur.execute("SELECT id FROM gps_location WHERE id=%s ", (user_id, ))
    rows = cur.fetchall()
    return len(rows) >= 1
def save_test_outcome_to_submit_result(self, submit_id, test_id, test_outcome):
    """Insert one test-execution outcome row for a submit.

    :param test_outcome: (return_code, stdout, stderr, time_ms) tuple
    """
    connection = db_connection.get_connection()
    connection.autocommit = True
    cursor = connection.cursor()
    execution_error_code, execution_stdout, execution_stderr, execution_time_ms = test_outcome
    cursor.execute(
        """INSERT INTO submit_result (submit_id, test_id, execution_return_code, execution_stdout, execution_stderr, execution_time_ms) VALUES (%s, %s, %s, %s, %s, %s);""",
        (submit_id, test_id, execution_error_code, execution_stdout,
         execution_stderr, execution_time_ms))
    connection.close()
def get_tests(self, problem_id):
    """Return (id, input, maximum_execution_time_ms) rows for a problem."""
    connection = db_connection.get_connection()
    connection.autocommit = True
    cursor = connection.cursor()
    cursor.execute(
        """SELECT id, input, maximum_execution_time_ms FROM test WHERE problem_id = %s;""",
        (problem_id, ))
    rows = cursor.fetchall()
    connection.close()
    return rows
def process(img):
    """Given an encoded image buffer, generate its feature vector and hash
    code and search for the nearest image in the image database.

    :param img: raw encoded image bytes
    :return: (img_path, class_name) of the best-matching database image
    """
    num = 10  # number of nearest hash codes to consider in the vote
    img = np.frombuffer(img, dtype=np.uint8)
    # NOTE(review): cv2.imdecode expects an IMREAD_* flag; cv2.COLOR_BGR2GRAY
    # is a color-conversion code that merely happens to be an int.  Kept
    # as-is to preserve behavior — confirm the intended decode mode.
    img = cv2.imdecode(img, cv2.COLOR_BGR2GRAY)
    img_gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
    size = (224, 224)
    img_resize = cv2.resize(img_gray, size, interpolation=cv2.INTER_AREA)
    img_resize = img_resize / 256
    img_resize = img_resize[np.newaxis, :, :]  # add batch dimension
    feature, hash_code = generate_feature_vector_and_hash(img_resize)
    hash_code = hash_code[0]
    similar_ids = find_picture_ids(hash_code, num)
    conn = get_connection()
    cursor = conn.cursor()
    # Majority vote over the classes of the most similar images.
    result_classes = dict()
    for pic_id in similar_ids:
        # FIX: bound parameter instead of str.format interpolation
        cursor.execute("SELECT class from img_info where img_id=%s",
                       (pic_id[1], ))
        item_class = int(cursor.fetchone()[0])
        if item_class in result_classes.keys():
            result_classes[item_class].append(pic_id)
        else:
            result_classes[item_class] = [pic_id]
    max_times = 0
    result_class = -1
    for key, value in result_classes.items():
        if len(value) > max_times:
            result_class = key
            max_times = len(value)
    # Representative image: first candidate of the winning class.
    cursor.execute(
        "SELECT img_url,class_name from img_info where img_id = %s",
        (result_classes[result_class][0][1], ))
    img_info = cursor.fetchone()
    img_path = img_info[0].replace("\\", "/")  # normalize Windows separators
    class_name = img_info[1]
    return img_path, class_name
def flower_info():
    """Request handler: look up the flower_info row for the 'name' request
    value and return its fields as a dict."""
    class_name = request.values["name"]
    conn = get_connection()
    cursor = conn.cursor()
    # FIX: 'name' comes straight from the HTTP request — bound parameter
    # instead of str.format interpolation (SQL injection).
    cursor.execute("SELECT * FROM flower_info WHERE name=%s", (class_name, ))
    result = cursor.fetchone()
    return {
        "title": result[1],
        "description": result[3],
        "distribution": result[4],
        "more_info": result[5],
        "img_url": result[2].replace("\\", "/")  # normalize Windows separators
    }
def get_module_names():
    """Return the scraper module names stored in `bookie_web`.

    module_names is a list of modules to import for starting the scrapping.
    """
    modules = []
    mydb = db_connection.get_connection()
    if mydb != 1:  # presumably get_connection returns 1 on failure — verify
        cursor = mydb.cursor()
        cursor.execute("select scrap from `bookie_web`;")
        for record in cursor.fetchall():
            modules.append(record[0])
    return modules
def generate_batch(size):
    """Yield (image_batch, labels, urls) batches of preprocessed images.

    Walks processed_data/{train,valid}/<class_dir>/, skipping images whose
    ids already exist in hash_code_full or were seen earlier in this run,
    and loads the original JPEG from static/jpg/ for each.

    :param size: batch size (arrays are pre-allocated to this length)
    """
    types = ["train", "valid"]
    conn = get_connection()
    cursor = conn.cursor()
    # ids already persisted — these are skipped below
    cursor.execute("SELECT img_id from hash_code_full;")
    saved = cursor.fetchall()
    saved = [s[0] for s in saved]
    cur_num = 0
    img_urls = []
    cur_train_batch = np.zeros((size, 224, 224, 3))
    cur_label = np.zeros(size, dtype=np.int8)
    processed_id = set()  # ids seen in this run (train/valid may overlap)
    for data_type in types:
        for dir in os.listdir("processed_data/" + data_type):
            for filename in os.listdir(
                    os.path.join("processed_data", data_type, dir)):
                # filenames look like <d>_<d>_image_<id>.jpg
                img_id = re.search(r"\d_\d_image_(.*).jpg", filename).group(1)
                img_original_name = re.search(r"\d_\d_(.*)", filename).group(1)
                if int(img_id) in processed_id or int(img_id) in saved:
                    continue
                processed_id.add(int(img_id))
                img = cv2.imread(
                    os.path.join("static", "jpg", img_original_name))
                img_gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
                img_size = (224, 224)
                img_resize = cv2.resize(img_gray, img_size,
                                        interpolation=cv2.INTER_AREA)
                img_data = img_resize / 256  # scale pixel values to [0, 1)
                cur_train_batch[cur_num] = img_data
                if cur_num == 99:
                    print(99)  # leftover debug output
                cur_label[cur_num] = dir  # directory name doubles as class label
                img_urls.append(
                    os.path.join("static", "jpg", img_original_name))
                cur_num += 1
                if cur_num == size:
                    # Batch full: emit it and start a fresh one
                    yield (cur_train_batch, cur_label, img_urls)
                    cur_num = 0
                    cur_train_batch = np.zeros((size, 224, 224, 3))
                    cur_label = np.zeros(size, dtype=np.int8)
                    img_urls = []
    # NOTE(review): this final yield emits a zero-padded partial batch even
    # when cur_num == 0 — confirm downstream consumers handle that.
    yield (cur_train_batch, cur_label, img_urls)
def save_to_db_hash_code():
    """Generate hash codes batch-by-batch and persist them into
    hash_code_full, committing once per batch."""
    batch_size = 100
    for img_input, img_labels, img_urls in generate_batch(batch_size):
        features, hashcodes = generate_feature_vector_and_hash(img_input)
        connection = get_connection()
        cursor = connection.cursor()
        for idx in range(len(img_urls)):
            # The numeric image id is embedded in the file name
            image_id = re.search(r"image_(.*).jpg", img_urls[idx]).group(1)
            cursor.execute(
                "INSERT INTO hash_code_full ( img_id, hash_code) VALUES(%s, %s);",
                [image_id, str(hashcodes[idx])])
        connection.commit()
def remove_user(user_id):
    """Delete the user_personal_info row for *user_id*.

    :return: "success", "not-found", or "error"
    """
    try:
        conn = db_connection.get_connection()
        cursor = conn.cursor()
        SQL_QUERY = "DELETE FROM user_personal_info WHERE id=%s"
        cursor.execute(SQL_QUERY, (user_id, ))
        # FIX: the delete was never committed — every other mutating helper
        # in this file commits, so without this the row was not removed.
        conn.commit()
        if cursor.rowcount > 0:
            status = "success"
        else:
            status = "not-found"
    except (Exception, psycopg2.Error) as error:
        print(error)
        status = "error"
    return status
def update_user_steps(user_id, steps, date):
    """Update the steps count for (user_id, date).

    :return: "Success", "Could Not insert", or 'failed' on DB error
    """
    connection = db_connection.get_connection()
    cursor = connection.cursor()
    try:
        cursor.execute(
            "UPDATE steps SET steps = %s WHERE id = %s AND date= %s",
            (steps, user_id, date))
        connection.commit()
        status = "Success" if cursor.rowcount > 0 else "Could Not insert"
    except (Exception, psycopg2.Error) as Error:
        print(Error)
        status = 'failed'
    return status
def create_a_file(self, directory, unprocessed_submit_id):
    """Write the submit's code content to <directory>/Main.java.

    :param directory: Directory to process particular submit in.
    :param unprocessed_submit_id: integer field from database
    :return: None
    """
    conn = db_connection.get_connection()
    cur = conn.cursor()
    query = 'SELECT _code_content FROM get_code_content_from_submit(%s);'
    cur.execute(query, (unprocessed_submit_id, ))
    code_content = cur.fetchall()[0][0]
    conn.close()
    java_file_path = os.path.join(directory, 'Main.java')
    # FIX: context manager guarantees the handle is closed even if the DB
    # fetch or write raises (the file was previously opened first and leaked
    # on any exception).
    with open(java_file_path, mode='w+', encoding='utf-8') as file:
        file.write(code_content)
def save_compilation_outcome_to_submit(self, compilation_outcome, submit_id):
    """Store a compilation outcome tuple on the submit row.

    :param compilation_outcome: (return_code, stdout, stderr, time_ms)
    """
    connection = db_connection.get_connection()
    connection.autocommit = True
    cursor = connection.cursor()
    compilation_return_code, compilation_stdout, compilation_stderr, compilation_time_ms = compilation_outcome
    cursor.execute(
        """UPDATE submit SET compilation_return_code = %s, compilation_stdout = %s, compilation_stderr = %s, compilation_time_ms = %s WHERE id = %s;""",
        (compilation_return_code, compilation_stdout, compilation_stderr,
         compilation_time_ms, submit_id))
    connection.close()
def get_web_bookies():
    """Return {bookie name: url} for every row of `bookie_web`.

    Used to start scrapping; web_bookies is a dictionary with name as index
    and url as its related value.
    """
    web_bookies = {}
    mydb = db_connection.get_connection()
    if mydb != 1:  # presumably get_connection returns 1 on failure — verify
        cursor = mydb.cursor()
        cursor.execute("select name, url from `bookie_web`;")
        for name, url in cursor.fetchall():
            web_bookies[name] = url
    return web_bookies
def add_gps_cordianates(user_id, long, lat, timestamp):
    """Insert one GPS fix for a user.  (Name typo kept — callers use it.)

    :return: "Success" or "Could Not insert"
    """
    connection = db_connection.get_connection()
    cursor = connection.cursor()
    try:
        print(type(timestamp))  # leftover debug output, preserved
        cursor.execute(
            "INSERT INTO gps_location(id,gps_long,gps_lat,time) VALUES(%s,%s,%s,%s)",
            (user_id, long, lat, timestamp))
        connection.commit()
        status = "Success" if cursor.rowcount > 0 else "Could Not insert"
    except (Exception, psycopg2.Error) as error:
        print(error)
        status = "Could Not insert"
    return status
def add_calories_information(user_id, date, calories_burnt, calories_consumed, misc):
    """Insert one calories row for a user/date.

    :return: "Success", "Could Not insert", or "failed" on DB error
    """
    try:
        connection = db_connection.get_connection()
        cursor = connection.cursor()
        cursor.execute(
            "INSERT INTO calories(id,date,calories_burnt,calories_consumed,misc) VALUES(%s,%s,%s,%s,%s)",
            (user_id, date, calories_burnt, calories_consumed, misc))
        connection.commit()
        status = "Success" if cursor.rowcount > 0 else "Could Not insert"
    except (Exception, psycopg2.Error) as error:
        print(error)
        status = "failed"
    return status
def add_login_details(user_id, accsess_token, email_id, password, misc):
    """Insert a login row for a new user.

    :return: "success", "could not insert", or "failed" on DB error
    NOTE(review): the password is stored as received — no hashing visible here.
    """
    conn = db_connection.get_connection()
    cursor = conn.cursor()
    try:
        SQL_QUERY = "INSERT INTO login (id,access_tkn,email_id,password,misc) VALUES (%s,%s,%s,%s,%s)"
        # FIX: 'misc' was hard-coded to None, silently discarding the argument.
        value = (user_id, accsess_token, email_id, password, misc)
        cursor.execute(SQL_QUERY, value)
        conn.commit()
        if cursor.rowcount > 0:
            status = "success"
        else:
            status = "could not insert"
    except (Exception, psycopg2.Error) as error:
        print(error)
        status = "failed"
    return status
def add_to_database(user_id, username, first_name, last_name, photo_url, misc, email):
    """Insert one user_personal_info row.

    :return: "success" or "failed"
    """
    connection = db_connection.get_connection()
    cursor = connection.cursor()
    status = ""
    try:
        cursor.execute(
            "INSERT INTO user_personal_info(id,username,first_name,last_name,photo_url,misc,email) VALUES(%s,%s,%s,%s,%s,%s,%s)",
            (user_id.strip(), username.strip(), first_name.strip(),
             last_name, photo_url, misc, email))
        connection.commit()
        status = "success"
    except (Exception, psycopg2.Error) as error:
        print(error)
        status = "failed"
    return status
def authenticate_user(email_id, password):
    """Return True iff a login row matches (email_id, password).

    NOTE(review): plaintext password comparison in SQL — consider hashing.
    """
    conn = db_connection.get_connection()
    cursor = conn.cursor()
    try:
        SQL_QUERY = "SELECT access_tkn from login WHERE email_id=%s AND password=%s"
        value = (email_id, password)
        cursor.execute(SQL_QUERY, value)
        result = cursor.fetchall()
        if len(result) >= 1:
            # Entry is in the database
            status = True
        else:
            # Entry not in database
            status = False
    except (Exception, psycopg2.Error) as error:
        print(error)
        # FIX: previously returned the string "failed", which is truthy and
        # would read as an authenticated user in a boolean check; fail closed.
        status = False
    return status
# NOTE(review): legacy Python 2 CGI top-level script (has_key, Cookie/cgi
# modules).  This fragment is TRUNCATED — it ends inside an open
# triple-quoted UPDATE statement; the remainder is not in this file.
# NOTE(review): the SELECT statements interpolate cookie/form values with
# %-formatting — SQL injection risk; should use bound parameters.
student_id = 0
task_id = 0
code = ''
task_xml = {}
name = ''
# Parse the browser cookies and the submitted form fields
cookies = Cookie.SimpleCookie(os.environ.get("HTTP_COOKIE",""))
task_info = cgi.FieldStorage()
if task_info.has_key('task_id') and cookies.has_key('id') and cookies.has_key('type') :
    if cookies['type'] == 'Teacher':
        # Teachers are redirected away from the student task page
        html_header += 'Location: index.py'
    else:
        html_header += str(cookies)
    task_id = task_info['task_id'].value
    student_id = cookies['id'].value
    cursor = db_connection.get_connection()
    # Table and column prefix are taken from the 'type' cookie (Student/Teacher)
    cursor.execute('SELECT FirstName, LastName FROM %s WHERE %sID=%s' % (cookies['type'].value,cookies['type'].value,str(cookies['id'].value)))
    record = cursor.fetchone()
    name = record['FirstName']+' '+record['LastName']
    new_flag = 0
    curr_date = datetime.datetime.now()
    task_xml = task_delivery.get_task_xml(task_id)['task']
    try:
        cursor.execute("""SELECT Attempts, ProgressID FROM Progress WHERE StudentID=%s AND TaskID=%s""" % (str(student_id),str(task_id)))
        if cursor.rowcount == 0:
            # No progress row yet: first attempt at this task
            new_flag = 1
        else:
            progress_record = cursor.fetchone()
            # Truncated here in the source (statement continues past this file)
            cursor.execute("""UPDATE Progress SET DateModified=%s,
def save_code(code, task_id, student_id):
    """Persist a student's code for a task, stamping DateModified.

    NOTE(review): no explicit commit here — presumably the connection object
    autocommits; confirm against db_connection.
    """
    cursor = db_connection.get_connection()
    curr_date = datetime.datetime.now()
    # FIX: bound parameters replace manual quote-doubling and %-interpolation
    # (SQL injection; also broke for code containing backslashes).
    cursor.execute(
        "UPDATE Progress SET Code=%s, DateModified=%s WHERE TaskID=%s AND StudentID=%s",
        (code, str(curr_date), task_id, student_id))
def get_task_list():
    """Return every row of the Task table."""
    cursor = db_connection.get_connection()
    cursor.execute('SELECT * FROM Task')
    return cursor.fetchall()