def make_db_tables():
    """Create the project's two tables if they do not already exist.

    ``bike_stations`` holds the static per-station data; ``availability``
    holds the dynamic readings, keyed by (station_number, last_updated)
    with a foreign key back to the station row.
    """
    engine = scraper.connect_db()
    ddl_statements = (
        # static data table
        """CREATE TABLE IF NOT EXISTS bike_stations (station_number INT NOT NULL, station_name VARCHAR(45) NOT NULL, station_address VARCHAR(45) NOT NULL, station_loc_lat FLOAT NOT NULL, station_loc_long FLOAT NOT NULL, banking_available TINYINT(1) NOT NULL, bonus TINYINT(1), PRIMARY KEY (station_number));""",
        # dynamic data table
        """CREATE TABLE IF NOT EXISTS availability (station_number INT NOT NULL, bike_stands INT NOT NULL, bike_stands_available INT NOT NULL, bikes_available INT NOT NULL, last_updated BIGINT NOT NULL, PRIMARY KEY (station_number, last_updated), FOREIGN KEY (station_number) REFERENCES bike_stations(station_number));""",
    )
    try:
        for ddl in ddl_statements:
            engine.execute(ddl)
    except Exception as e:
        print("Error Type: ", type(e))
        print("Error Details: ", e)
def alter_column_datatype(table, column, data_type):
    """Change *column* in *table* to *data_type* (e.g. "FLOAT NOT NULL").

    Bug fix: identifiers (table/column names) and type keywords cannot be
    sent as DB-API bind parameters -- the driver quotes bind parameters as
    string VALUES, producing invalid DDL.  They are interpolated into the
    statement text instead.

    SECURITY NOTE: because of that interpolation, this must only ever be
    called with trusted, hard-coded identifiers -- never user input.
    """
    engine = scraper.connect_db()
    try:
        sql = "ALTER TABLE {} MODIFY COLUMN {} {};".format(table, column, data_type)
        engine.execute(sql)
    except Exception as e:
        print("Error type: ", type(e))
        print("Error details: ", e)
def alter_table():
    """One-off schema migration: rename station_location to station_loc_lat,
    add station_loc_long, then print the availability table's description.

    Bug fix: the original used a bare ``except:`` that swallowed every
    error (including KeyboardInterrupt) and printed no diagnostics.
    """
    engine = scraper.connect_db()
    try:
        sql = "ALTER TABLE bike_stations CHANGE station_location station_loc_lat FLOAT NOT NULL;"
        engine.execute(sql)
        sql2 = "ALTER TABLE bike_stations ADD COLUMN station_loc_long FLOAT NOT NULL AFTER station_loc_lat;"
        engine.execute(sql2)
        sql3 = "DESCRIBE availability;"
        res = engine.execute(sql3)
        print(res.fetchall())
    except Exception as e:
        print("Error type: ", type(e))
        print("Error details: ", e)
def get_stations():
    """Return every bike_stations row as a JSON response (key: "stations").

    Bug fix: the diagnostic print passed the format string and the count as
    two separate arguments, so "{}" was never substituted.
    """
    # NOTE(review): credentials/paths are hard-coded; move to config or env vars.
    engine = scraper.connect_db(
        "DublinBikeProjectDB.cun91scffwzf.eu-west-1.rds.amazonaws.com",
        "3306",
        "DublinBikeProjectDB",
        "theForkAwakens",
        "/home/ubuntu/anaconda3/envs/TheForkAwakens/Assignment4-P-E-K/src/scraper/db_password.txt")
    sql = "select * from bike_stations;"
    rows = engine.execute(sql).fetchall()
    print("#found {} stations".format(len(rows)))
    stations = jsonify(stations=[dict(row) for row in rows])
    engine.dispose()
    return stations
def availability():
    """Return every availability row as a JSON response (key: "stations").

    Bug fix: the diagnostic print passed the format string and the count as
    two separate arguments, so "{}" was never substituted.
    """
    # NOTE(review): credentials/paths are hard-coded; move to config or env vars.
    engine = scraper.connect_db(
        "DublinBikeProjectDB.cun91scffwzf.eu-west-1.rds.amazonaws.com",
        "3306",
        "DublinBikeProjectDB",
        "theForkAwakens",
        "/home/ubuntu/anaconda3/envs/TheForkAwakens/Assignment4-P-E-K/src/scraper/db_password.txt")
    # change this to suit what queries we will be using
    sql = "SELECT * from availability;"
    rows = engine.execute(sql).fetchall()
    print("#found {} availability".format(len(rows)))
    availability = jsonify(stations=[dict(row) for row in rows])
    engine.dispose()
    return availability
def station_details():
    """Return the most recent availability reading for one station, joined
    with its static name, as a JSON response (key: "stations").

    The station is chosen via the ``station_number`` query-string argument
    (called from a JavaScript function on the home page).

    Bug fix: the diagnostic print passed the format string and the count as
    two separate arguments, so "{}" was never substituted.
    """
    station_number = request.args.get('station_number')
    # NOTE(review): credentials/paths are hard-coded; move to config or env vars.
    engine = scraper.connect_db(
        "DublinBikeProjectDB.cun91scffwzf.eu-west-1.rds.amazonaws.com",
        "3306",
        "DublinBikeProjectDB",
        "theForkAwakens",
        "/home/ubuntu/anaconda3/envs/TheForkAwakens/Assignment4-P-E-K/src/scraper/db_password.txt")
    # station_number is bound as a parameter; latest row wins via ORDER BY ... LIMIT 1
    sql = "SELECT *, station_name FROM availability, bike_stations WHERE availability.station_number = %s and availability.station_number = bike_stations.station_number ORDER BY last_updated DESC LIMIT 1;"
    details = engine.execute(sql, station_number).fetchall()
    print("#found {} stations".format(len(details)))
    details = jsonify(stations=[dict(detail) for detail in details])
    engine.dispose()
    return details
def hourly_avg_dynamic(station_number, day):
    """Average bikes/stands available per hour of day for one station.

    Returns a tuple ``(avg_bikes, avg_stands)``: two lists of rounded
    integer averages, one entry per hour that has at least one
    observation, in ascending hour order.

    NOTE(review): ``day`` is accepted but never used -- the query returns
    observations for every day.  TODO: confirm whether the query should
    also filter by day.

    Bug fixes:
    - accumulators were sized 25 for 24 possible hours (off-by-one);
    - the averaging condition required the SUM to be positive as well as
      the count, which silently dropped hours whose true average was zero
      and misaligned the bikes/stands result lists.
    """
    sql = "select * from availability where station_number = %s;"
    # NOTE(review): credentials/paths are hard-coded; move to config or env vars.
    engine = scraper.connect_db(
        "DublinBikeProjectDB.cun91scffwzf.eu-west-1.rds.amazonaws.com",
        "3306",
        "DublinBikeProjectDB",
        "theForkAwakens",
        "/home/ubuntu/anaconda3/envs/TheForkAwakens/Assignment4-P-E-K/src/scraper/db_password.txt")
    station_details = engine.execute(sql, station_number).fetchall()
    engine.dispose()
    # Per-hour [sum, count] accumulators for hours 0..23.
    hours_bikes = [[0, 0] for _ in range(24)]
    hours_stands = [[0, 0] for _ in range(24)]
    for station in station_details:
        # The hour of day is parsed from the formatted timestamp, chars 11..13 ("HH").
        dtime = scraper.datetime_formatter(station["last_updated"])
        hour = int(dtime[1][11:13])
        hours_bikes[hour][0] += station["bikes_available"]
        hours_bikes[hour][1] += 1
        hours_stands[hour][0] += station["bike_stands_available"]
        hours_stands[hour][1] += 1
    # Average every hour that has observations (count > 0), including
    # hours whose average is legitimately zero.
    avg_bikes = [int(round(total / count, 0)) for total, count in hours_bikes if count > 0]
    avg_stands = [int(round(total / count, 0)) for total, count in hours_stands if count > 0]
    return avg_bikes, avg_stands
def daily_avg_dynamic(station_number, day):
    """Average bikes and stands available for one station on one day.

    Returns ``(avg_bikes, avg_bike_stands)`` as rounded ints.

    Bug fix: when the query matches no rows the original divided by zero;
    now returns ``(0, 0)`` for an empty result set.
    """
    # NOTE(review): credentials/paths are hard-coded; move to config or env vars.
    engine = scraper.connect_db(
        "DublinBikeProjectDB.cun91scffwzf.eu-west-1.rds.amazonaws.com",
        "3306",
        "DublinBikeProjectDB",
        "theForkAwakens",
        "/home/ubuntu/anaconda3/envs/TheForkAwakens/Assignment4-P-E-K/src/scraper/db_password.txt")
    sql = "select bikes_available, bike_stands_available from availability where station_number = %s and day = %s;"
    results = engine.execute(sql, station_number, day).fetchall()
    engine.dispose()
    if not results:
        # No observations for that station/day: avoid ZeroDivisionError.
        return 0, 0
    bikes = [row["bikes_available"] for row in results]
    bike_stands = [row["bike_stands_available"] for row in results]
    avg_bikes = int(round(sum(bikes) / len(bikes), 0))
    avg_bike_stands = int(round(sum(bike_stands) / len(bike_stands), 0))
    return avg_bikes, avg_bike_stands