def get(self, time_range):
    """Gets trailing app name metrics from matview"""
    # Only a fixed set of trailing windows is supported
    if time_range not in valid_trailing_time_periods:
        abort_bad_request_param('time_range', ns)
    parsed = trailing_app_name_parser.parse_args()
    metrics = get_trailing_app_metrics(
        {"limit": parsed.get("limit", 10), "time_range": time_range}
    )
    return success_response(metrics)
def get(self): args = verify_token_parser.parse_args() # 1. Break JWT into parts token_parts = args["token"].split(".") if not len(token_parts) == 3: abort_bad_request_param("token", ns) # 2. Decode the signature try: signature = base64.urlsafe_b64decode(token_parts[2] + "==") except Exception: ns.abort(400, "The JWT signature could not be decoded.") signature = signature.decode() base64_header = token_parts[0] base64_payload = token_parts[1] message = f"{base64_header}.{base64_payload}" # 3. Recover message from signature web3 = web3_provider.get_web3() wallet = None encoded_message = encode_defunct(text=message) try: wallet = web3.eth.account.recover_message( encoded_message, signature=signature, ) except Exception: ns.abort( 404, "The JWT signature is invalid - wallet could not be recovered." ) if not wallet: ns.abort( 404, "The JWT signature is invalid - wallet could not be recovered." ) # 4. Check that user from payload matches the user from the wallet from the signature try: stringified_payload = base64.urlsafe_b64decode(base64_payload + "==") payload = json.loads(stringified_payload) except Exception: ns.abort(400, "JWT payload could not be decoded.") wallet_user_id = get_user_with_wallet(wallet) if not wallet_user_id or wallet_user_id != payload["userId"]: ns.abort( 404, "The JWT signature is invalid - the wallet does not match the user.", ) # 5. Send back the decoded payload return success_response(payload)
def get(self):
    """Fetch one or more tracks by id, permalink, or handle+slug.

    Accepts two mutually exclusive lookup styles:
      - 'id': one or more track ids (decoded via decode_ids_array)
      - 'permalink'/'route' (aliases), or a single 'handle' + 'slug' pair
    """
    args = full_track_route_parser.parse_args()
    slug, handle = args.get("slug"), args.get("handle")
    routes = args.get("route")
    permalinks = args.get("permalink")
    current_user_id = get_current_user_id(args)
    ids = args.get("id")
    # 'route' and 'permalink' are aliases for the same lookup; merge them
    routes = (routes or []) + (permalinks or [])
    if not ((slug and handle) or routes or ids):
        full_ns.abort(400, "Expected query param 'permalink' or 'id'")
    elif ids and (routes or (slug and handle)):
        # id-based and route-based lookups cannot be mixed in one request
        full_ns.abort(
            400,
            "Ambiguous query params: Expected one of 'id', 'permalink' but not both",
        )
    routes_parsed = routes if routes else []
    try:
        routes_parsed = parse_routes(routes_parsed)
    except IndexError:
        abort_bad_request_param("permalink", full_ns)
    if slug and handle:
        routes_parsed.append({"handle": handle, "slug": slug})
    if ids:
        tracks = get_tracks({
            "with_users": True,
            "id": decode_ids_array(ids),
            "current_user_id": current_user_id,
        })
    else:
        tracks = get_tracks({
            "with_users": True,
            "routes": routes_parsed,
            "current_user_id": current_user_id,
        })
    if not tracks:
        # Report not-found using whichever lookup style the caller used
        if handle and slug:
            abort_not_found(f"{handle}/{slug}", full_ns)
        elif routes:
            abort_not_found(routes, full_ns)
        else:
            abort_not_found(ids, full_ns)
    # For backwards compatibility, the old handle/slug route returned an object, not an array
    if handle and slug:
        tracks = extend_track(tracks[0])
    else:
        tracks = [extend_track(track) for track in tracks]
    return success_response(tracks)
def get(self, time_range):
    """Gets aggregated route metrics based on time range and bucket size"""
    if time_range not in valid_bucket_sizes:
        abort_bad_path_param('time_range', ns)
    args = aggregate_route_metrics_parser.parse_args()
    allowed_buckets = valid_bucket_sizes[time_range]
    # Fall back to the first valid bucket size for this time range
    bucket = args.get("bucket_size") or allowed_buckets[0]
    if bucket not in allowed_buckets:
        abort_bad_request_param('bucket_size', ns)
    return success_response(get_aggregate_route_metrics(time_range, bucket))
def get(self, app_name):
    """Get the app name metrics

    Args:
        app_name: name of the app whose metrics are fetched.

    Query args (via metrics_app_name_parser):
        limit: number of results, capped at 48.
        start_time: unix epoch timestamp; defaults to 0 when absent.
    """
    args = metrics_app_name_parser.parse_args()
    # Cap the limit at 48
    if args.get("limit") is None:
        args["limit"] = 48
    else:
        args["limit"] = min(args.get("limit"), 48)
    try:
        args["start_time"] = parse_unix_epoch_param(args.get("start_time"), 0)
    except Exception:
        # Narrowed from bare `except:` so SystemExit/KeyboardInterrupt propagate;
        # matches the error handling used by the other metrics endpoints.
        abort_bad_request_param("start_time", ns)
    app_name_metrics = get_app_name_metrics(app_name, args)
    response = success_response(app_name_metrics)
    return response
def get(self):
    """Search for users matching the given query."""
    args = search_parser.parse_args()
    query = args["query"]
    if not query:
        abort_bad_request_param("query", ns)
    response = search(
        {
            "query": query,
            "kind": SearchKind.users.name,
            "is_auto_complete": False,
            "current_user_id": None,
            "with_users": True,
            "limit": 10,
            "offset": 0,
        }
    )
    return success_response(response)
def get(self):
    """Get genre metrics.

    Query args (via metrics_genres_parser):
        limit: number of results, capped at 100.
        offset: pagination offset; defaults to 0.
        start_time: unix epoch timestamp; defaults to 0 when absent.
    """
    args = metrics_genres_parser.parse_args()
    # Cap the limit at 100
    if args.get("limit") is None:
        args["limit"] = 100
    else:
        args["limit"] = min(args.get("limit"), 100)
    if args.get("offset") is None:
        args["offset"] = 0
    try:
        args["start_time"] = parse_unix_epoch_param(args.get("start_time"), 0)
    except Exception:
        # Narrowed from bare `except:`; consistent with the duplicate handler
        # elsewhere in this codebase that already uses `except Exception:`.
        abort_bad_request_param("start_time", ns)
    genre_metrics = get_genre_metrics(args)
    response = success_response(genre_metrics)
    return response
def get(self):
    """List all the app names

    Query args (via metrics_app_name_list_parser):
        limit: number of results, capped at 100.
        offset: pagination offset; defaults to 0.
        start_time: unix epoch timestamp; defaults to 0 when absent.
    """
    args = metrics_app_name_list_parser.parse_args()
    # Cap the limit at 100
    if args.get("limit") is None:
        args["limit"] = 100
    else:
        args["limit"] = min(args.get("limit"), 100)
    if args.get("offset") is None:
        args["offset"] = 0
    try:
        args["start_time"] = parse_unix_epoch_param(args.get("start_time"), 0)
    except Exception:
        # Narrowed from bare `except:`; consistent with the duplicate handler
        # that already uses `except Exception:`.
        abort_bad_request_param("start_time", ns)
    app_names = get_app_names(args)
    response = success_response(app_names)
    return response
def get(self):
    """Return genre metrics, with limit capped at 100 and offset defaulting to 0."""
    args = metrics_genres_parser.parse_args()
    limit = args.get("limit")
    # Clamp the limit to at most 100, defaulting to 100 when unspecified
    args["limit"] = 100 if limit is None else min(limit, 100)
    if args.get("offset") is None:
        args["offset"] = 0
    try:
        args["start_time"] = parse_unix_epoch_param(args.get("start_time"), 0)
    except Exception:
        abort_bad_request_param("start_time", ns)
    return success_response(get_genre_metrics(args))
def get(self):
    """List all the app names"""
    args = metrics_app_name_list_parser.parse_args()
    limit = args.get("limit")
    # Clamp the limit to at most 100, defaulting to 100 when unspecified
    args["limit"] = 100 if limit is None else min(limit, 100)
    if args.get("offset") is None:
        args["offset"] = 0
    try:
        args["start_time"] = parse_unix_epoch_param(args.get("start_time"), 0)
    except Exception:
        abort_bad_request_param("start_time", ns)
    return success_response(get_app_names(args))
def get(self):
    """Gets top playlists."""
    args = top_parser.parse_args()
    limit = args.get('limit')
    # Clamp the limit to at most 100, defaulting to 100 when unspecified
    args['limit'] = 100 if limit is None else min(limit, 100)
    if args.get('offset') is None:
        args['offset'] = 0
    # Only albums and playlists are valid collection types here
    if args.get('type') not in ['album', 'playlist']:
        abort_bad_request_param('type', ns)
    args['with_users'] = True
    top = get_top_playlists(args.type, args)
    return success_response([extend_playlist(p) for p in top])
def get(self):
    """Search for a playlist."""
    args = search_parser.parse_args()
    query = args["query"]
    if not query:
        abort_bad_request_param('query', ns)
    found = search(
        {
            "query": query,
            "kind": SearchKind.playlists.name,
            "is_auto_complete": False,
            "current_user_id": None,
            "with_users": True,
            "limit": 10,
            "offset": 0,
        }
    )
    return success_response([extend_playlist(p) for p in found["playlists"]])
def get(self):
    """Search for a track."""
    args = search_parser.parse_args()
    query = args["query"]
    if not query:
        abort_bad_request_param('query', ns)
    found = search(
        {
            "query": query,
            "kind": SearchKind.tracks.name,
            "is_auto_complete": False,
            "current_user_id": None,
            "with_users": True,
            "limit": 10,
            "offset": 0,
            "only_downloadable": args["only_downloadable"],
        }
    )
    return success_response([extend_track(t) for t in found["tracks"]])
def get(self):
    """Get play-count metrics.

    Query args (via metrics_plays_parser):
        limit: number of buckets, capped at 168.
        start_time: unix epoch timestamp; defaults to 0 when absent.
        bucket_size: one of valid_date_buckets; defaults to 'hour'.
    """
    args = metrics_plays_parser.parse_args()
    # Cap the limit at 168 buckets
    if args.get("limit") is None:
        args["limit"] = 168
    else:
        args["limit"] = min(args.get("limit"), 168)
    try:
        args["start_time"] = parse_unix_epoch_param(args.get("start_time"), 0)
    except Exception:
        # Narrowed from bare `except:`; consistent with the duplicate handler
        # that already uses `except Exception:`.
        abort_bad_request_param("start_time", ns)
    if args.get("bucket_size") is None:
        args["bucket_size"] = "hour"
    if args.get("bucket_size") not in valid_date_buckets:
        abort_bad_request_param("bucket_size", ns)
    plays_metrics = get_plays_metrics(args)
    response = success_response(plays_metrics)
    return response
def get(self):
    """Return play-count metrics bucketed over time."""
    args = metrics_plays_parser.parse_args()
    limit = args.get("limit")
    # Clamp the limit to at most 168, defaulting to 168 when unspecified
    args["limit"] = 168 if limit is None else min(limit, 168)
    try:
        args["start_time"] = parse_unix_epoch_param(args.get("start_time"), 0)
    except Exception:
        abort_bad_request_param("start_time", ns)
    if args.get("bucket_size") is None:
        args["bucket_size"] = "hour"
    if args.get("bucket_size") not in valid_date_buckets:
        abort_bad_request_param("bucket_size", ns)
    return success_response(get_plays_metrics(args))
def get(self):
    """Get the route metrics

    Query args (via metrics_route_parser):
        limit: number of buckets, capped at 168.
        bucket_size: one of valid_date_buckets; defaults to 'hour'.
        start_time: unix epoch timestamp; defaults to 0 when absent.
        exact: optional boolean; whether to match the path exactly.
        path: route path filter; defaults to ''.
    """
    args = metrics_route_parser.parse_args()
    # Cap the limit at 168 buckets
    if args.get("limit") is None:
        args["limit"] = 168
    else:
        args["limit"] = min(args.get("limit"), 168)
    if args.get("bucket_size") is None:
        args["bucket_size"] = "hour"
    if args.get("bucket_size") not in valid_date_buckets:
        abort_bad_request_param("bucket_size", ns)
    try:
        args["start_time"] = parse_unix_epoch_param(args.get("start_time"), 0)
    except Exception:
        # Narrowed from bare `except:`; consistent with the duplicate handler
        # that already uses `except Exception:`.
        abort_bad_request_param("start_time", ns)
    if args.get("exact") is not None:
        # parse_bool_param returns None for an unparseable value
        args["exact"] = parse_bool_param(args.get("exact"))
        if args.get("exact") is None:
            abort_bad_request_param("exact", ns)
    else:
        args["exact"] = False
    args["path"] = args.get("path") if args.get("path") is not None else ""
    route_metrics = get_route_metrics(args)
    response = success_response(route_metrics)
    return response
def get(self):
    """Get the route metrics"""
    args = metrics_route_parser.parse_args()
    limit = args.get("limit")
    # Clamp the limit to at most 168, defaulting to 168 when unspecified
    args["limit"] = 168 if limit is None else min(limit, 168)
    if args.get("bucket_size") is None:
        args["bucket_size"] = "hour"
    if args.get("bucket_size") not in valid_date_buckets:
        abort_bad_request_param("bucket_size", ns)
    try:
        args["start_time"] = parse_unix_epoch_param(args.get("start_time"), 0)
    except Exception:
        abort_bad_request_param("start_time", ns)
    exact = args.get("exact")
    if exact is not None:
        # parse_bool_param returns None for an unparseable value
        args["exact"] = parse_bool_param(exact)
        if args["exact"] is None:
            abort_bad_request_param("exact", ns)
    else:
        args["exact"] = False
    args["path"] = args.get("path") if args.get("path") is not None else ""
    return success_response(get_route_metrics(args))
def get(self, app_name):
    """Get the app name metrics

    Args:
        app_name: name of the app whose metrics are fetched.

    Query args (via metrics_app_name_parser):
        limit: number of buckets, capped at 168.
        start_time: unix epoch timestamp; defaults to 0 when absent.
        bucket_size: one of valid_date_buckets; defaults to 'hour'.
    """
    args = metrics_app_name_parser.parse_args()
    # Cap the limit at 168 buckets
    if args.get("limit") is None:
        args["limit"] = 168
    else:
        args["limit"] = min(args.get("limit"), 168)
    try:
        args["start_time"] = parse_unix_epoch_param(args.get("start_time"), 0)
    except Exception:
        # Narrowed from bare `except:` so SystemExit/KeyboardInterrupt propagate;
        # matches the error handling used by the other metrics endpoints.
        abort_bad_request_param("start_time", ns)
    if args.get("bucket_size") is None:
        args["bucket_size"] = "hour"
    if args.get("bucket_size") not in valid_date_buckets:
        abort_bad_request_param("bucket_size", ns)
    app_name_metrics = get_app_name_metrics(app_name, args)
    response = success_response(app_name_metrics)
    return response
def get(self):
    """
    Resolves and redirects a provided Audius app URL to the API resource URL it represents.

    This endpoint allows you to lookup and access API resources when you only know the
    audius.co URL.
    Tracks, Playlists, and Users are supported.
    """
    parsed = resolve_route_parser.parse_args()
    url = parsed.get("url")
    if not url:
        abort_bad_request_param("url", ns)
    try:
        db = db_session.get_db_read_replica()
        with db.scoped_session() as session:
            target = resolve_url(session, url)
            if not target:
                return abort_not_found(url, ns)
            # 302: temporary redirect — the resolution may change over time
            return redirect(target, code=302)
    except Exception as e:
        logger.warning(e)
        abort_not_found(url, ns)