def get(self, id):
    """Return a podcast's xml, the user's subscription status, the
    subscriber count, and the average rating.

    Args:
        id: podcast id from the URL.

    Returns:
        (dict, int): payload on success, or ({}, 404) when the podcast
        does not exist.
    """
    conn, cur = df.get_conn()
    uid = uf.get_user_id()

    # Is the current user subscribed to this podcast?
    cur.execute(
        "SELECT * FROM subscriptions WHERE userid = %s AND podcastid = %s;",
        (uid, id))
    subscribed = cur.rowcount != 0

    cur.execute("SELECT xml, id, rssfeed FROM Podcasts WHERE id=(%s)", (id,))
    res = cur.fetchone()
    if res is None:
        # BUG FIX: the connection previously leaked on this path.
        df.close_conn(conn, cur)
        return {}, 404
    xml, podcast_id, rssfeed = res

    cur.execute("SELECT count(*) from subscriptions where podcastid=(%s)",
                (podcast_id,))
    res = cur.fetchone()
    subscribers = res[0] if res is not None else 0

    # BUG FIX: `rating` was previously only assigned inside the `if res:`
    # branch, so an unrated podcast raised UnboundLocalError at the return.
    rating = None
    cur.execute("SELECT rating from ratingsview where id=%s", (podcast_id,))
    res = cur.fetchone()
    if res:
        rating = f"{res[0]:.1f}"

    df.close_conn(conn, cur)

    # Refresh the feed in the background; update_rss draws its own
    # connection from the pool, so ours can be released first.
    thread = threading.Thread(target=update_rss,
                              args=(rssfeed, conn_pool),
                              daemon=True)
    thread.start()
    return {
        "xml": xml,
        "id": podcast_id,
        "subscription": subscribed,
        "subscribers": subscribers,
        "rating": rating
    }, 200
def put(self, notificationId):
    """Update the status of one of the current user's notifications.

    Body: {"status": "read" | "unread" | "dismissed"}.

    Returns 400 on a missing or invalid status, 404 when no notification
    matches (id, user), 500 with a rollback if more than one row matched.
    """
    status = request.json.get("status")
    if status is None:
        return {"data": "must include status field"}, 400
    # BUG FIX: the original used `and`, so any *string* outside the allowed
    # set (e.g. "foo") passed validation and hit the database.
    if not isinstance(status, str) or status not in ['read', 'unread', 'dismissed']:
        return {"data": "status must be one of read, unread, or dismissed"}, 400
    conn, cur = df.get_conn()
    user_id = get_user_id()
    cur.execute(
        """
        update Notifications
        set status=%s
        where id=%s and userid=%s
        returning id
        """, (status, notificationId, user_id))
    results = cur.fetchall()
    if len(results) > 1:
        # id should identify exactly one row; more than one means the data
        # is in a bad state, so undo the change.
        conn.rollback()
        df.close_conn(conn, cur)
        return {
            "data": "unexpectedly modified more than 1 notification. rolling back"
        }, 500
    conn.commit()
    df.close_conn(conn, cur)
    if len(results) == 0:
        return {
            "data": "No notification associated with id {} and userId {}".format(
                notificationId, user_id)
        }, 404
    return {}, 200
def get(self):
    """Return the current user's email address.

    Returns:
        {"email": ...} on success, or ({"error": ...}, 404) when the user
        row does not exist.
    """
    conn, cur = df.get_conn()
    user_id = uf.get_user_id()
    cur.execute("SELECT email FROM users WHERE id=%s", (user_id, ))
    row = cur.fetchone()
    df.close_conn(conn, cur)
    if row is None:
        # ROBUSTNESS FIX: previously `fetchone()[0]` raised TypeError
        # (an unhandled 500) when the id had no matching user.
        return {"error": "user not found"}, 404
    return {"email": row[0]}
def get(self, id):
    """Return one page of the current user's listen history.

    `id` is the 1-based page number. The optional `limit` query arg
    (default 12) sets the page size. Page 1 additionally reports the
    total number of pages.
    """
    parser = reqparse.RequestParser()
    parser.add_argument('limit', type=int, required=False, location="args")
    args = parser.parse_args()

    # Caller-supplied page size, falling back to a default of 12.
    page_size = args['limit'] if args['limit'] is not None else 12
    if page_size <= 0 or id <= 0:
        return {"error": "bad request"}, 400
    skip = (id - 1) * page_size

    conn, cur = df.get_conn()
    user_id = uf.get_user_id()

    total_pages = None
    if id == 1:
        # First page: fetch everything so the total page count can be
        # derived from rowcount.
        cur.execute("SELECT p.id, p.xml, l.episodeguid, l.listenDate, l.timestamp FROM listens l, podcasts p where l.userid=%s and \
            p.id = l.podcastid ORDER BY l.listenDate DESC", (user_id,))
        total_pages = math.ceil(cur.rowcount / page_size)
    else:
        cur.execute("SELECT p.id, p.xml, l.episodeguid, l.listenDate, l.timestamp FROM listens l, podcasts p where l.userid=%s and \
            p.id = l.podcastid ORDER BY l.listenDate DESC LIMIT %s OFFSET %s", (user_id, page_size, skip))

    rows = cur.fetchmany(page_size)
    history = [{
        "pid": row[0],
        "xml": row[1],
        "episodeguid": row[2],
        "listenDate": row[3].timestamp(),
        "timestamp": row[4]
    } for row in rows]
    df.close_conn(conn, cur)
    return jsonify(history=history,
                   numPages=total_pages if id == 1 else '',
                   status=200)
def delete(self, notificationId):
    """Soft-delete a notification by marking it 'dismissed'.

    Returns 404 when the notification does not belong to the user, 500
    with a rollback if more than one row was touched, otherwise 200.
    """
    conn, cur = df.get_conn()
    user_id = get_user_id()
    cur.execute(
        """
        update notifications
        set status='dismissed'
        where id=%s and userId=%s
        returning id
        """, (notificationId, user_id))
    touched = cur.fetchall()

    if len(touched) > 1:
        # id should identify a single row; anything else is a data problem,
        # so roll the update back rather than commit it.
        conn.rollback()
        df.close_conn(conn, cur)
        return {
            "data": "unexpectedly deleted more than 1 notification. rolling back"
        }, 500

    conn.commit()
    df.close_conn(conn, cur)

    if not touched:
        return {
            "data": "No notification associated with id {} and userId {}".format(
                notificationId, user_id)
        }, 404
    return {}, 200
def get(self):
    """Return unfinished episodes from the user's subscribed podcasts.

    For each subscription, the latest episode guid is scraped from the
    stored feed xml; episodes the user has already completed are omitted.
    """
    conn, cur = df.get_conn()
    uid = uf.get_user_id()
    cur.execute(
        """SELECT p.title, p.xml, p.id FROM podcasts p FULL OUTER JOIN subscriptions s on s.podcastId = p.id WHERE s.userID = %s; """,
        (uid, ))
    podcasts = cur.fetchall()  # all podcasts the user is subscribed to
    results = []
    for p in podcasts:
        # Extract the episode guid from the stored xml.
        search = re.search('<guid.*>(.*)</guid>', p[1])
        if search is None:
            # ROBUSTNESS FIX: feeds whose xml lacks a <guid> previously
            # crashed on `.group()` (AttributeError); skip them instead.
            continue
        guid = search.group(1)
        cur.execute(
            "SELECT complete FROM Listens where episodeGuid =%s AND userId = %s;",
            (guid, uid))
        res = cur.fetchone()
        # Only surface episodes not yet listened to completion.
        if res is None or res[0] == False:
            results.append({
                "title": p[0],
                "xml": p[1],
                "pid": p[2],
                "guid": guid
            })
    df.close_conn(conn, cur)
    return results, 200
def get(self, id):
    """Return the current user's rating for podcast `id` (None if unrated)."""
    conn, cur = df.get_conn()
    user_id = uf.get_user_id()
    cur.execute(
        "SELECT rating FROM podcastratings WHERE podcastid=%s and userid=%s",
        (id, user_id))
    row = cur.fetchone()
    df.close_conn(conn, cur)
    if row:
        return {"rating": row[0]}, 200
    return {"rating": None}, 200
def delete(self, podcastId):
    """Unsubscribe the current user from podcast `podcastId`.

    Deleting a non-existent subscription is a no-op and still reports
    success.
    """
    conn, cur = df.get_conn()
    subscriber = get_user_id()
    cur.execute(
        "DELETE FROM subscriptions WHERE userid = %s AND podcastid = %s;",
        (subscriber, podcastId))
    conn.commit()
    df.close_conn(conn, cur)
    return {"data": "subscription deleted"}, 200
def delete(self):
    """Unsubscribe the current user from the podcast named in the JSON body."""
    conn, cur = df.get_conn()
    subscriber = get_user_id()

    # podcast id arrives in the request body rather than the URL here.
    parser = reqparse.RequestParser(bundle_errors=True)
    parser.add_argument('podcastid', type=str, location="json")
    body = parser.parse_args()

    cur.execute(
        "DELETE FROM subscriptions WHERE userid = %s AND podcastid = %s;",
        (subscriber, body["podcastid"]))
    conn.commit()
    df.close_conn(conn, cur)
    return {"data": "subscription deleted"}, 200
def post(self):
    """Subscribe the current user to the podcast named in the JSON body."""
    conn, cur = df.get_conn()
    subscriber = get_user_id()

    # Pull the target podcast id out of the request body.
    parser = reqparse.RequestParser(bundle_errors=True)
    parser.add_argument('podcastid', type=str, location="json")
    body = parser.parse_args()

    cur.execute(
        "INSERT INTO subscriptions(userid, podcastid) VALUES (%s,%s);",
        (subscriber, body["podcastid"]))
    conn.commit()
    df.close_conn(conn, cur)
    return {'data': "subscription successful"}, 200
def put(self, podcastId):
    """Record playback progress for one episode of `podcastId`.

    Body: time (int, seconds), episodeGuid (str), duration (number; the
    front end sends -1 when it could not read the metadata). Upserts a
    row in `listens` and marks the episode complete at >= 95% listened.
    """
    conn, cur = df.get_conn()
    user_id = uf.get_user_id()
    timestamp = request.json.get("time")
    episodeGuid = request.json.get("episodeGuid")
    duration = request.json.get("duration")
    if timestamp is None:
        df.close_conn(conn, cur)
        return {"error": "timestamp not included"}, 400
    if not isinstance(timestamp, int):
        df.close_conn(conn, cur)
        return {"error": "timestamp must be an integer"}, 400
    if episodeGuid is None:
        df.close_conn(conn, cur)
        return {"error": "episodeGuid not included"}, 400
    if duration is None:
        df.close_conn(conn, cur)
        return {"error": "duration is not included"}, 400
    # ROBUSTNESS FIX: a non-numeric duration (e.g. a string from bad client
    # JSON) previously blew up on `0.95 * duration` with an unhandled 500.
    if not isinstance(duration, (int, float)):
        df.close_conn(conn, cur)
        return {"error": "duration must be a number"}, 400
    # "Complete" means at least 95% listened. A negative duration means the
    # front end couldn't read the metadata yet, so treat it as not complete.
    complete = (timestamp >= 0.95 * duration) if duration >= 0 else False
    if duration > 0:
        # Opportunistically persist the duration on the episode row.
        try:
            cur.execute(
                """
                update episodes
                set duration=%s
                where guid=%s and podcastId=%s
                """, (duration, episodeGuid, podcastId))
        except Exception as e:
            df.close_conn(conn, cur)
            return {
                "error":
                "Failed to update episodes, probably because the episode does not exist:\n{}"
                .format(str(e))
            }, 400
    cur.execute(
        """
        INSERT INTO listens (userId, podcastId, episodeGuid, listenDate, timestamp, complete)
        values (%s, %s, %s, now(), %s, %s)
        ON CONFLICT ON CONSTRAINT listens_pkey
        DO UPDATE set listenDate=now(), timestamp=%s, complete=%s;
        """, (user_id, podcastId, episodeGuid, timestamp, complete,
              timestamp, complete))
    conn.commit()
    df.close_conn(conn, cur)
    return {}, 200
def put(self, id):
    """Create or update the current user's 1-5 rating for podcast `id`."""
    conn, cur = df.get_conn()
    user_id = uf.get_user_id()
    # Ratings are limited to 1..5; reqparse rejects anything else.
    parser = reqparse.RequestParser()
    parser.add_argument('rating', type=int, required=True,
                        choices=(1, 2, 3, 4, 5), help="Rating not valid",
                        location="json")
    args = parser.parse_args()
    # Upsert: update when a rating already exists, otherwise insert.
    cur.execute(
        "SELECT rating FROM podcastratings where userid=%s and podcastid=%s",
        (user_id, id))
    if cur.fetchone():
        cur.execute(
            "UPDATE podcastratings SET rating=%s WHERE userid=%s and podcastid=%s",
            (args["rating"], user_id, id))
    else:
        cur.execute(
            "INSERT INTO podcastratings (userid, podcastid, rating) VALUES (%s, %s, %s)",
            (user_id, id, args["rating"]))
    conn.commit()
    # BUG FIX: the connection was never returned to the pool (leak).
    df.close_conn(conn, cur)
    return {"success": "added"}
def put(self):
    """Change the current user's password and/or email.

    Body: oldpassword (required), newpassword (optional), newemail
    (optional). The old password must verify against the stored bcrypt
    hash; a new email must not belong to another account. The password is
    only rehashed when it actually differs from the old one.
    """
    user_id = uf.get_user_id()
    conn, cur = df.get_conn()

    parser = reqparse.RequestParser(bundle_errors=True)
    parser.add_argument('oldpassword', type=str, required=True,
                        help="Need old password", location="json")
    parser.add_argument('newpassword', type=str, location="json")
    parser.add_argument('newemail', type=str, location="json")
    args = parser.parse_args()

    # Verify the current password first; bail out early if it is wrong.
    cur.execute("SELECT hashedpassword FROM users WHERE id=%s", (user_id, ))
    stored_hash = cur.fetchone()[0].strip()
    if not bcrypt.checkpw(args["oldpassword"].encode('UTF-8'),
                          stored_hash.encode('utf-8')):
        df.close_conn(conn, cur)
        return {"error": "wrong password"}, 400

    new_hash = ""
    new_password = args["newpassword"]
    if new_password and args["oldpassword"] != new_password:
        # Only rehash when the password actually changes.
        new_hash = bcrypt.hashpw(new_password.encode('UTF-8'),
                                 bcrypt.gensalt())

    new_email = args['newemail']
    if new_email:
        # Reject the email if it already exists on a *different* account.
        cur.execute("SELECT email FROM users where email=%s", (new_email, ))
        if cur.fetchone():
            cur.execute("SELECT email FROM users where email=%s and id=%s",
                        (new_email, user_id))
            if not cur.fetchone():
                df.close_conn(conn, cur)
                return {"error": "Email already exists"}, 400
        cur.execute("UPDATE users SET email=%s WHERE id=%s",
                    (new_email, user_id))

    if new_hash:
        cur.execute("UPDATE users SET hashedpassword=%s WHERE id=%s",
                    (new_hash.decode('UTF-8'), user_id))
    conn.commit()
    df.close_conn(conn, cur)
    return {"data": "success"}, 200
def get(self):
    """Return personalized podcast recommendations for the current user."""
    conn, cur = df.get_conn()
    user_id = uf.get_user_id()
    cur.execute("select distinct * from recommendations(%s)", (user_id, ))
    rows = cur.fetchall()
    # ROBUSTNESS FIX: a NULL rating previously crashed the `:.1f` format
    # with TypeError; emit None instead. Assumes recommendations() returns
    # (title, thumbnail, id, subs, eps, rating) — confirm against the SQL
    # function definition.
    recs = [{
        "title": r[0],
        "thumbnail": r[1],
        "id": r[2],
        "subs": r[3],
        "eps": r[4],
        "rating": f"{r[5]:.1f}" if r[5] is not None else None
    } for r in rows]
    df.close_conn(conn, cur)
    return {"recommendations": recs}
def get(self, podcastId): conn, cur = df.get_conn() user_id = uf.get_user_id() cur.execute( """ select episodeGuid, listenDate, timestamp, complete from listens where userid=%s and podcastid=%s """, (user_id, podcastId)) res = cur.fetchall() df.close_conn(conn, cur) jsonready = [{ "episodeGuid": x[0], "listenDate": str(x[1]), "timestamp": x[2], "complete": x[3] } for x in res] return jsonready, 200
def get(self, podcastId):
    """Return the saved playback position and completion for one episode.

    Reads `episodeGuid` from the JSON body; 400 when it is missing or no
    listen record exists.
    """
    conn, cur = df.get_conn()
    user_id = uf.get_user_id()
    episodeGuid = request.json.get("episodeGuid")
    if episodeGuid is None:
        df.close_conn(conn, cur)
        return {"error": "episodeGuid not included"}, 400
    cur.execute(
        """
        SELECT timestamp, complete from listens
        where podcastId=%s and episodeGuid=%s and userId=%s
        """, (podcastId, episodeGuid, user_id))
    record = cur.fetchone()
    df.close_conn(conn, cur)
    if record is None:
        return {"error": "invalid podcastId or episodeGuid"}, 400
    saved_time, complete = record
    return {"time": int(saved_time), "complete": complete}, 200
def get(self): conn, cur = df.get_conn() user_id = uf.get_user_id() cur.execute( """ select p.rssfeed from subscriptions s join podcasts p on s.podcastId=p.id where s.userId = %s """, (user_id, )) results = cur.fetchall() subscribedPodcasts = [] if results: subscribedPodcasts = [x[0] for x in results] for sp in subscribedPodcasts: thread = threading.Thread(target=update_rss, args=(sp, conn_pool), daemon=True) thread.start() cur.execute( """ select p.title, p.id, e.title, e.created, e.guid, u.status, u.id from notifications u join episodes e on u.episodeguid=e.guid join podcasts p on e.podcastid=p.id where u.userid=%s and (u.status='read' or u.status='unread') order by e.created desc """, (user_id, )) results = cur.fetchall() df.close_conn(conn, cur) if results is None: return {}, 200 json = [{ "podcastTitle": x[0], "podcastId": x[1], "episodeTitle": x[2], "dateCreated": str(x[3]), "episodeGuid": x[4], "status": x[5], "id": x[6] } for x in results] return json, 200
def get(self): conn, cur = df.get_conn() uid = get_user_id() cur.execute("SELECT p.title, p.author, p.description, p.id, r.rating, p.thumbnail FROM podcasts p, ratingsview r, subscriptions s \ WHERE s.podcastId = p.id and s.userID = %s and r.id = p.id;", (uid,)) podcasts = cur.fetchall() # grabs all podcasts taht user is subscribed to results = [] for p in podcasts: cur.execute("select count(podcastId) FROM subscriptions where podcastId = %s GROUP BY podcastId;", (p[3],)) subscribers = cur.fetchone() title = p[0] author = p[1] description = p[2] pID = p[3] rating = f"{p[4]:.1f}" thumbnail = p[5] results.append({"subscribers" : subscribers, "title" : title, "author" : author, "description" : description, "pid" : pID, "rating": rating, "thumbnail": thumbnail}) df.close_conn(conn, cur) return results, 200
def delete(self):
    """Delete the current user's account and every row that references it.

    Requires the account password in the JSON body; a wrong password
    returns 400 and nothing is deleted.
    """
    conn, cur = df.get_conn()
    user_id = get_user_id()
    parser = reqparse.RequestParser(bundle_errors=True)
    parser.add_argument('password', type=str, required=True,
                        help="Need old password", location="json")
    args = parser.parse_args()

    # Verify the password against the stored bcrypt hash before touching
    # anything.
    cur.execute("SELECT hashedpassword FROM users WHERE id=%s", (user_id, ))
    stored_hash = cur.fetchone()[0].strip()
    if not bcrypt.checkpw(args["password"].encode('UTF-8'),
                          stored_hash.encode('utf-8')):
        df.close_conn(conn, cur)
        return {"error": "wrong password"}, 400

    # Remove dependent rows first, then the user record itself; the whole
    # sweep is committed as one transaction.
    cleanup_statements = (
        "DELETE FROM subscriptions WHERE userId=%s",
        "DELETE FROM podcastratings WHERE userId=%s",
        "DELETE FROM episoderatings WHERE userId=%s",
        "DELETE FROM listens WHERE userId=%s",
        "DELETE FROM searchqueries WHERE userId=%s",
        "DELETE FROM notifications WHERE userid=%s",
        "DELETE FROM users WHERE id=%s",
    )
    for statement in cleanup_statements:
        cur.execute(statement, (user_id, ))
    conn.commit()
    df.close_conn(conn, cur)
    return {"data": "account deleted"}, 200
def get(self):
    """Full-text search over podcasts, with category-only matches appended.

    Reads the `search_query` query arg (400 when missing) and logs the
    query for logged-in users. Returns direct title/author/description
    matches first, then podcasts that only matched via a category name.
    """
    search = request.args.get('search_query')
    if search is None:
        return {"error": "Bad Request"}, 400
    conn, cur = df.get_conn()
    # Record the query for logged-in users (used elsewhere for analytics
    # and recommendations).
    user_id = uf.get_user_id()
    if user_id:
        cur.execute(
            "insert into searchqueries (userid, query, searchdate) values (%s, %s, %s)",
            (user_id, search,
             datetime.datetime.utcnow().strftime("%Y-%m-%d %H:%M:%S")))
        conn.commit()
    # DEAD-CODE FIX: the original read unused `offset`/`limit` query args
    # here; pagination was never implemented, so the reads were removed.
    cur.execute(
        """SELECT count(s.podcastid), v.title, v.author, v.description, v.id, v.thumbnail, rv.rating
        FROM searchvector v
        FULL OUTER JOIN Subscriptions s ON s.podcastId = v.id
        LEFT JOIN ratingsview rv ON v.id = rv.id
        WHERE v.vector @@ plainto_tsquery(%s)
        GROUP BY (s.podcastid, v.title, v.author, v.description, v.id, v.vector, v.thumbnail, rv.rating)
        ORDER BY ts_rank(v.vector, plainto_tsquery(%s)) desc;
        """, (search, search))
    # Podcasts whose title/author/description directly match the search.
    podcasts = cur.fetchall()
    cur.execute(
        """SELECT DISTINCT p.id, p.title, p.author, p.description, ps.count, p.thumbnail, rv.rating
        FROM podcasts p
        LEFT JOIN podcastcategories t ON t.podcastid = p.id
        LEFT JOIN categories c ON t.categoryid = c.id
        LEFT JOIN podcastsubscribers ps ON ps.id = p.id
        LEFT JOIN ratingsview rv ON p.id = rv.id
        WHERE to_tsvector(c.name) @@ plainto_tsquery(%s)
        and p.id not in (select podcastid from search(%s));
        """, (search, search))
    # Podcasts matching only via a category, minus ones already found above.
    categories = cur.fetchall()

    # BUG FIX: rating comes from a LEFT JOIN on ratingsview and can be
    # NULL; formatting None with `:.1f` raised TypeError. Emit None instead.
    def fmt_rating(rating):
        return f"{rating:.1f}" if rating is not None else None

    results = [{
        "subscribers": p[0],
        "title": p[1],
        "author": p[2],
        "description": p[3],
        "pid": p[4],
        "thumbnail": p[5],
        "rating": fmt_rating(p[6])
    } for p in podcasts]
    results.extend({
        "subscribers": c[4],
        "title": c[1],
        "author": c[2],
        "description": c[3],
        "pid": c[0],
        "thumbnail": c[5],
        "rating": fmt_rating(c[6])
    } for c in categories)
    df.close_conn(conn, cur)
    return results, 200