def update_songs():
    if db.is_empty():  # Database is empty
        raise InvalidUsage("Resource does not exist. Try \"CREATE\" instead",
                           status_code=409)
    # Verify payload
    try:
        payload_json = request.json
    except Exception as e:
        raise InvalidUsage("Payload is not valid JSON",
                           status_code=409,
                           payload={"exception": str(e)})
    try:
        song_list = list(map(db.Song.from_json_dict, payload_json))
    except Exception as e:
        raise InvalidUsage(
            "JSON cannot be converted into valid song data. See payload for required columns",
            status_code=409,
            payload={
                "exception": str(e),
                "columns": [field.name for field in dataclasses.fields(db.Song)]
            })
    # Bulk update songs (update existing and add new ones)
    db.put_songs(song_list)
    return ("Success: Existing entries which overlap with new input in the "
            "'song_id' field have been overwritten, new songs have been added "
            "to the database"), 202
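# Every handler in these snippets raises InvalidUsage. A minimal sketch of the
# usual Flask custom-exception pattern they appear to follow; the real class,
# the `app` object, and factory classmethods such as user_not_found() or
# invalid_token() used further below are assumptions and may differ:
from flask import jsonify

class InvalidUsage(Exception):
    """API error carrying a message, an HTTP status code and an optional payload."""
    status_code = 400

    def __init__(self, message, status_code=None, payload=None):
        super().__init__(message)
        self.message = message
        if status_code is not None:
            self.status_code = status_code
        self.payload = payload

    def to_dict(self):
        # Merge the extra payload (if any) with the message into one response body
        rv = dict(self.payload or ())
        rv["message"] = self.message
        return rv

# Registered once on the app so every raised InvalidUsage becomes a JSON error response
@app.errorhandler(InvalidUsage)
def handle_invalid_usage(error):
    response = jsonify(error.to_dict())
    response.status_code = error.status_code
    return response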
def update_song(song_id):
    if not db.is_in_database(song_id):
        raise InvalidUsage(
            f"Song with song_id {song_id} does not exist in database. Try \"POST\"",
            status_code=409)
    # Verify payload
    try:
        payload_json = request.json
    except Exception as e:
        raise InvalidUsage("Payload is not valid JSON",
                           status_code=409,
                           payload={"exception": str(e)})
    try:
        song_list = list(map(db.Song.from_dict, payload_json))
    except Exception as e:
        raise InvalidUsage(
            "JSON cannot be converted into valid song data. See payload for required columns",
            status_code=409,
            payload={
                "exception": str(e),
                "columns": [field.name for field in dataclasses.fields(db.Song)]
            })
    db.put_songs(song_list)
    return f"Updated song at /songs/{song_id}", 202
def create_songs():
    if not db.is_empty():  # Database is not empty
        raise InvalidUsage(
            "Resource already exists. Try \"PUT\" or \"DELETE\"",
            status_code=409)
    # Verify payload
    try:
        payload_json = request.json
    except Exception as e:
        raise InvalidUsage("Payload is not valid JSON",
                           status_code=409,
                           payload={"exception": str(e)})
    try:
        song_list = list(map(db.Song.from_json_dict, payload_json))
    except Exception as e:
        raise InvalidUsage(
            "JSON cannot be converted into valid song data. See payload for required columns",
            status_code=409,
            payload={
                "exception": str(e),
                "columns": [field.name for field in dataclasses.fields(db.Song)]
            })
    # Database is empty, so we can insert without checking for song_id collisions
    db.add_songs(song_list)
    return "Created collection at /songs", 201
def create_song(song_id):
    if db.is_in_database(song_id):
        raise InvalidUsage(
            f"Song with song_id {song_id} already exists in database. Try \"PUT\" or \"DELETE\"",
            status_code=409)
    # Verify payload
    if request.content_type != "application/json":
        raise InvalidUsage("Content-type is not set to \"application/json\"",
                           status_code=409)
    try:
        payload_json = request.json
    except Exception as e:
        raise InvalidUsage("Payload is not valid JSON",
                           status_code=409,
                           payload={"exception": str(e)})
    try:
        song_list = list(map(db.Song.from_dict, payload_json))
    except Exception as e:
        raise InvalidUsage(
            "JSON cannot be converted into valid song data. See payload for required columns",
            status_code=409,
            payload={
                "exception": str(e),
                "columns": [field.name for field in dataclasses.fields(db.Song)]
            })
    db.add_songs(song_list)
    return f"Created song at /songs/{song_id}", 201
def get_sentiment():
    status_code = HTTPStatus.OK
    response = {"status_code": status_code, "results": dict(), "message": ""}
    try:
        text = request.get_json()["text"]
        results = dict()
        results["sentiment"], results["score"] = get_sentiment_result(text)
        response["message"] = text
        response["results"] = results
    except KeyError as error:  # e.g. invalid input field name
        message = f"'{error.args[0]}' key is not found"
        logging.error(f"{message}: {error}")
        raise InvalidUsage(message, status_code=HTTPStatus.BAD_REQUEST)
    except AttributeError as error:  # e.g. text/language should be a string
        message = "Input should be string"
        logging.error(f"{message}")
        raise InvalidUsage(message, status_code=HTTPStatus.BAD_REQUEST)
    return jsonify(response), status_code
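# Example call to the sentiment handler above (a sketch: the "/sentiment" route
# and the host are assumptions; only the "text" field is read from the body):
import requests

resp = requests.post("http://localhost:5000/sentiment",
                     json={"text": "I really enjoyed this album"})
print(resp.json())  # {"status_code": 200, "message": "...", "results": {"sentiment": ..., "score": ...}}
# Omitting the "text" key triggers the KeyError branch and returns 400 via InvalidUsage.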
def show_songs():
    query = "SELECT * FROM music"
    filters = []
    options = []
    if request.args.get('year'):
        try:
            year = int(request.args.get('year'))
        except ValueError:
            raise InvalidUsage(
                f"Value for parameter 'year' is not an integer: '{request.args.get('year')}'",
                status_code=422)
        filters.append(f"song_year = '{year}'")
    if request.args.get('artist_id'):
        filters.append(f"artist_id = '{request.args.get('artist_id')}'")
    filters, options = parse_order_by_parameters(request, filters, options)
    if request.args.get('count'):
        options.append(f"LIMIT {request.args.get('count')}")
    query = compute_query(query, filters, options)
    rows = db.fetch(query)
    linked_songs = list(map(db.LinkedSong.from_db_row, rows))
    if request.content_type in {"application/json", None}:
        return jsonify(linked_songs)
    elif request.content_type == "text/csv":
        return create_csv_response(db.LinkedSong, linked_songs)
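# compute_query is not shown in these snippets. A minimal sketch of what it
# presumably does, given how it is called above and further below (hypothetical
# reconstruction; the real helper may differ, and note that interpolating
# request values into SQL like this is open to injection, so parameterized
# queries would be safer):
def compute_query(query, filters, options=None):
    # Join all filter expressions into a single WHERE clause
    if filters:
        query += " WHERE " + " AND ".join(filters)
    # Append trailing clauses such as ORDER BY and LIMIT in the order they were collected
    for option in options or []:
        query += option if option.startswith(" ") else f" {option}"
    return query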
def delete_song(song_id):
    if not db.is_in_database(song_id):
        raise InvalidUsage(
            f"Song with song_id {song_id} does not exist in database.",
            status_code=409)
    db.delete_song(song_id)
    return f"Deleted song at /songs/{song_id}", 202
def show_artist_songs(artist_id):
    query = "SELECT * FROM music"
    filters = []
    filters.append(f"artist_id = '{artist_id}'")
    if request.args.get('year'):
        try:
            year = int(request.args.get('year'))
        except ValueError:
            raise InvalidUsage(
                f"Value for parameter 'year' is not an integer: '{request.args.get('year')}'",
                status_code=422)
        filters.append(f"song_year = '{year}'")
    if request.args.get('genre'):
        filters.append(f"artist_terms LIKE '%{request.args.get('genre')}%'")
    query = compute_query(query, filters)
    rows = db.fetch(query)
    linked_songs = list(map(db.LinkedSong.from_db_row, rows))
    if request.content_type in {"application/json", None}:
        return jsonify(linked_songs)
    elif request.content_type == "text/csv":
        return create_csv_response(db.LinkedSong, linked_songs)
def parse_order_by_parameters(request, filters, options):
    if request.args.get('order_by'):
        order_by = request.args.get('order_by')
        # Ordering by given column and optionally given direction (default=descending)
        if not db.column_exists(order_by):
            raise InvalidUsage(
                f"Incorrect column name: '{str(order_by)}'. Check payload for viable column names",
                status_code=422,
                payload={"columns": db.get_columns()})
        else:
            # Ordering by column value
            ordering = f" ORDER BY {order_by}"
            if request.args.get('direction'):
                direction = request.args.get('direction')
                if direction in {"asc", "ascending", "ASC", "ASCENDING"}:
                    ordering += " ASC"
                elif direction in {
                        "dsc", "desc", "descending", "DSC", "DESC", "DESCENDING"
                }:
                    ordering += " DESC"
                else:
                    directions = [
                        "asc", "ascending", "ASC", "ASCENDING", "dsc", "desc",
                        "descending", "DSC", "DESC", "DESCENDING"
                    ]
                    raise InvalidUsage(
                        f"Undefined direction: '{direction}'. Check payload for viable directions",
                        status_code=422,
                        payload={"directions": directions})
            else:
                # Default option
                ordering += " DESC"
            options.append(ordering)
            # Adding threshold value for the ordered column (query parameter is spelled 'treshold')
            if request.args.get('treshold'):
                try:
                    treshold = float(request.args.get('treshold'))
                except ValueError:
                    raise InvalidUsage(
                        f"Value for parameter 'treshold' is not castable to float: '{request.args.get('treshold')}'",
                        status_code=422)
                filters.append(f"{order_by} > {treshold}")
    return filters, options
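# Example of how the parameters parsed above combine on the listing endpoint
# (a sketch: the "/songs" route, the host and the column name are assumptions):
import requests

resp = requests.get("http://localhost:5000/songs",
                    params={"order_by": "song_hotttnesss",  # must be an existing column
                            "direction": "asc",             # default is descending
                            "treshold": 0.5,                # note: parameter key is spelled 'treshold'
                            "count": 10})                   # becomes a LIMIT clause in show_songs()
print(resp.status_code, resp.json())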
def login():
    payload = request.get_json()["user"]
    password = payload['password']
    user = db.session.query(User).filter_by(email=payload["email"]).first()
    # Reject unknown emails as well as wrong passwords
    if user is None or not check_password(password, user.password):
        raise InvalidUsage.user_not_found()
    return jsonify(user.to_dict())
def decorated_function(*args, **kwargs):
    if "Authorization" not in request.headers:
        raise InvalidUsage.invalid_token()
    auth = request.headers.get("Authorization")
    regex = re.compile(r"^Token ([a-z0-9]+)")
    match = regex.search(auth)
    if match is None:
        raise InvalidUsage.invalid_token()
    token = match.group(1)
    user = db.session.query(User).filter_by(token=token).first()
    if user is None:
        raise InvalidUsage.invalid_token()
    g.user = user
    return f(*args, **kwargs)
def before_request():
    try:
        # Here you can add the relevant code to get the authentication information
        # from the custom database. The existing code is for reference only in terms of format.
        sign.set_auth([{
            'accessKey': "C180130204197838",
            'secretKey': "62d7eb0d370e603acd651066236c878b"
        }])
    except Exception:
        # Here Exception needs to be changed to the corresponding exception you need.
        raise InvalidUsage(**_except.DATABASE_CONNECTION_ERROR)
def search():
    time = dt.now()
    query = request.args.get('q', None)
    if not query:
        raise InvalidUsage("Query param 'q' required.", status_code=400)
    try:
        response = {}
        jobs = []
        response['query'] = query
        response['results'] = {}
        # Fan out one greenlet per upstream target and wait for all of them
        jobs = [gevent.spawn(process_command, x, query, time) for x in REQUEST_TO]
        gevent.joinall(jobs)
        for res in jobs:
            response['results'][res.value[0]] = res.value[1]
        # print('full', dt.now() - time)
        return get_response(response)
    except Exception:
        raise InvalidUsage("Something happened. Please try again later.",
                           status_code=500)
def server_info():
    ip = request.args.get('ip')
    if not ip:
        raise InvalidUsage('no ip')
    port = request.args.get('port')
    if port is None:
        port = 25565
    else:
        try:
            port = int(port)
        except ValueError:
            raise InvalidUsage('invalid port')
    # Cache key: md5 of the query type plus ip and port (encoded to bytes for Python 3)
    m = md5()
    m.update(('q%s%s' % (ip, port)).encode())
    m = m.hexdigest()
    try:
        result = get_cache_item(m)
    except Exception:
        result = False
    if not result:
        result = {}
        result['time'] = time()
        try:
            result['value'] = MCQuery(ip, port).full_stat()
        except Exception:
            result['value'] = False
        set_cache_item(m, result['value'])
    if not result['value']:
        return jsonify({'status': 'success', 'online': False})
    return jsonify(parse_query_data(result))
def delete_songs():
    conn = db.get_connection()
    cursor = conn.cursor()
    # For checking if database is empty
    query = "SELECT * FROM music"
    cursor.execute(query)
    row = cursor.fetchone()
    if row is None:  # Database is empty
        raise InvalidUsage("Resource does not exist.", status_code=409)
    conn.close()
    db.clear_music_table()
    return "Resource deleted", 200
def show_song(song_id):
    if not db.is_in_database(song_id):
        raise InvalidUsage(
            f"Song with song_id {song_id} does not exist in database. Try \"POST\"",
            status_code=409)
    query = f"SELECT * FROM music WHERE song_id = '{song_id}'"
    rows = db.fetch(query)
    songs = list(map(db.LinkedSong.from_db_row, rows))
    if request.content_type in {"application/json", None}:
        return jsonify(songs)
    elif request.content_type == "text/csv":
        return create_csv_response(db.LinkedSong, songs)
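# create_csv_response is not defined in these snippets. A minimal sketch of a
# helper with the same call signature, assuming LinkedSong is a dataclass and
# that a Flask Response with a text/csv body is expected (hypothetical; the
# real implementation may differ):
import csv
import dataclasses
import io

from flask import Response

def create_csv_response(row_type, rows):
    # Write one header row from the dataclass fields, then one line per instance
    buffer = io.StringIO()
    writer = csv.writer(buffer)
    writer.writerow([field.name for field in dataclasses.fields(row_type)])
    for row in rows:
        writer.writerow(dataclasses.astuple(row))
    return Response(buffer.getvalue(), mimetype="text/csv")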
def connect():
    vlc = VLCRemote()
    port = request.form.get('port', type=str)
    password = request.form.get('pass', type=str)
    try:
        vlc.login(port, password)
    except Exception:
        raise InvalidUsage('Cannot connect to vlc', status_code=400)
    # Persist the working credentials for later sessions
    with open('credentials.txt', 'w') as file:
        json.dump({'port': port, 'password': password}, file)
    return {}
def show_artist(artist_id):
    # TODO This should link to releases/songs from this artist
    # TODO This should display metadata about the artist, including statistics about the
    # popularity of the songs, optionally filtered by a specific year, such as mean, median
    # and stddev. Also most recent release or something could be nice
    query = "SELECT * FROM music"
    filters = []
    filters.append(f"artist_id = '{artist_id}'")
    if request.args.get('year'):
        try:
            year = int(request.args.get('year'))
        except ValueError:
            raise InvalidUsage(
                f"Value for parameter 'year' is not an integer: '{request.args.get('year')}'",
                status_code=422)
        filters.append(f"song_year = '{year}'")
    query = compute_query(query, filters)
    rows = db.fetch(query)
    song_rows = list(map(db.SongRow.from_db_row, rows))
    if len(song_rows) == 0:
        meta_data = {}
    else:
        meta_data = {}
        meta_data["mean_song_popularity"] = mean([
            song_row.song_hotttnesss for song_row in song_rows
            if song_row.song_hotttnesss is not None
        ])
        meta_data["median_song_popularity"] = median([
            song_row.song_hotttnesss for song_row in song_rows
            if song_row.song_hotttnesss is not None
        ])
        if len(song_rows) > 1:
            meta_data["stddev_song_popularity"] = stdev([
                song_row.song_hotttnesss for song_row in song_rows
                if song_row.song_hotttnesss is not None
            ])
        else:
            meta_data["stddev_song_popularity"] = "undefined (need 2 entries)"
        meta_data["artist_familiarity"] = mean([
            song_row.artist_familiarity for song_row in song_rows
            if song_row.artist_familiarity is not None
        ])
        meta_data["artist_hotttnesss"] = mean([
            song_row.artist_hotttnesss for song_row in song_rows
            if song_row.artist_hotttnesss is not None
        ])
        # First generate the list of all name occurrences, then find the most common one,
        # then take the set of name occurrences
        meta_data["artist_names_set"] = [
            song_row.artist_name for song_row in song_rows
        ]
        meta_data["artist_name"] = most_common(meta_data["artist_names_set"])
        meta_data["artist_names_set"] = list(set(meta_data["artist_names_set"]))
        # First generate the list of all term occurrences, then find the most common one,
        # then take the set of term occurrences
        meta_data["artist_terms_set"] = [
            song_row.artist_terms for song_row in song_rows
        ]
        meta_data["artist_terms"] = most_common(meta_data["artist_terms_set"])
        meta_data["artist_terms_set"] = list(set(meta_data["artist_terms_set"]))
        # Generate links in a dirty way
        linked_artist = db.LinkedArtist.from_db_row(rows[0])
        meta_data["links"] = linked_artist.links
    if request.content_type in {"application/json", None}:
        return jsonify(meta_data)
    elif request.content_type == "text/csv":
        return dict_to_csv_response(meta_data)
def predict():
    """Return prediction based on request.args

    Example: GET request args should look like
        {
            'GP': 36, 'MIN': 27.4, 'PTS': 7.4, 'FGM': 2.6, 'FGA': 7.6,
            'FG%': 34.7, '3P Made': 0.5, '3PA': 2.1, '3P%': 25, 'FTM': 1.6,
            'FTA': 2.3, 'FT%': 69.9, 'OREB': 0.7, 'DREB': 3.4, 'REB': 4.1,
            'AST': 1.9, 'STL': 0.4, 'BLK': 0.4, 'TOV': 1.3
        }

    Returns:
        str: prediction, whether a player is worth investing in or not
    """
    try:
        with open('configs.yaml', 'r') as f:
            configs = yaml.safe_load(f)
        if 'api' in configs and 'model_path' in configs['api']:
            model_path = configs['api']['model_path']
        if 'api' in configs and 'scaler_path' in configs['api']:
            scaler_path = configs['api']['scaler_path']
        model = Model(model_path=model_path, scaler_path=scaler_path)
        features_name = [
            'GP', 'MIN', 'PTS', 'FGM', 'FGA', 'FG%', '3P Made', '3PA', '3P%',
            'FTM', 'FTA', 'FT%', 'OREB', 'DREB', 'REB', 'AST', 'STL', 'BLK',
            'TOV'
        ]
        # Build the feature vector in the expected column order, defaulting missing args to 0
        x = []
        for f in features_name:
            value = request.args.get(f)
            if value:
                x.append(value)
            else:
                x.append(0)
        x_minmax = model.scaler.transform([x])
        y_pred = model.model.predict(x_minmax)[0]
        return "Prediction: " + str(y_pred) + ", so this player " + {
            0: 'is not',
            1: 'is'
        }[y_pred] + " worth investing in NBA"
    except ValueError as e:
        raise InvalidUsage(str(e), status_code=500)
    except yaml.YAMLError as e:
        raise InvalidUsage(str(e), status_code=500)
    except Exception as e:
        raise InvalidUsage(str(e), status_code=500)
def server_status():
    ip = request.args.get('ip')
    if not ip:
        raise InvalidUsage('no ip')
    port = request.args.get('port')
    if port is None:
        port = 25565
    else:
        try:
            port = int(port)
        except ValueError:
            raise InvalidUsage('invalid port')
    # Cache key: md5 of ip and port (encoded to bytes for Python 3)
    m = md5()
    m.update(('%s%s' % (ip, port)).encode())
    m = m.hexdigest()
    try:
        result = get_cache_item(m)
    except Exception:
        result = False
    olderServer = False
    if not result:
        result = {}
        result['time'] = time()
        try:
            result['value'] = get_info(ip, port)
        except ValueError:
            # Can't decode it as JSON, it's probably an old server
            olderServer = True
        except socket.error:
            # Can't connect, this can sometimes be an old server issue
            olderServer = True
        except Exception:
            # Server is probably down
            result['value'] = False
        if olderServer:
            try:
                result['value'] = get_info_old(ip, port)
                result['value']['old'] = True
            except Exception:
                # If we still can't get information, it's probably not a Minecraft server
                result['value'] = False
        set_cache_item(m, result['value'])
    if not result['value']:
        return jsonify({'status': 'success', 'online': False})
    if request.args.get('favicon') is None or request.args.get('favicon') == 'false':
        favicon = False
    else:
        favicon = True
    if request.args.get('players') is not None or request.args.get('players') == 'true':
        players = True
    else:
        players = False
    if olderServer or 'old' in result['value']:
        result = parse_old_data(result)
    else:
        result = parse_server_data(result, favicon, players)
    return jsonify(result)
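# Example call to the status handler above (a sketch: the "/server/status" route
# and host are assumptions; 'ip' is required, 'port' defaults to 25565, and
# 'favicon'/'players' toggle the extra fields in the parsed response):
import requests

resp = requests.get("http://localhost:5000/server/status",
                    params={"ip": "mc.example.com",
                            "port": 25565,
                            "favicon": "false",
                            "players": "true"})
print(resp.json())  # e.g. {'status': 'success', 'online': False} when the server is unreachable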