class StationDataframes:
    """Builds pandas DataFrames from station data in the `sfbike` MySQL schema."""

    # One row per station/year/month/day/hour/bikes_available bucket, oldest first.
    FIND_BY_ID_STATION_STATUS = ''' SELECT ss.station_id, ss.bikes_available, ss.docks_available, ss.time FROM `sfbike`.`station_status` ss where ss.station_id in (#) GROUP BY ss.station_id, year(ss.time), day(ss.time), month(ss.time), hour(ss.time), ss.bikes_available order by time asc; '''

    # All activity+weather rows for the requested stations, newest first.
    FETCH_ALL_STATION_ACTIVITY_WEATHER = ''' SELECT * FROM sfbike.sf_station_activity_weather where station_id in (#) order by time desc;'''

    def __init__(self):
        # Data-access object that owns the MySQL connection.
        self.dao = MySqlDataSouce()

    @staticmethod
    def _format_ids(station_ids):
        """Render station ids as a comma-separated list usable inside SQL ``IN (...)``.

        Bug fix: the original used ``str(station_ids)`` directly, so a list
        argument rendered as ``[1, 2]`` and produced invalid SQL. Lists,
        tuples and sets are now joined element-wise; scalars and strings
        pass through unchanged (backward compatible).
        """
        if isinstance(station_ids, (list, tuple, set)):
            return ", ".join(str(station_id) for station_id in station_ids)
        return str(station_ids)

    def _build_activity_query(self, station_ids):
        """Substitute the formatted id list into the activity/weather template.

        NOTE(review): plain string substitution is not injection-safe; the
        ids are presumably trusted integers — confirm at the call sites.
        """
        return self.FETCH_ALL_STATION_ACTIVITY_WEATHER.replace('#', self._format_ids(station_ids))

    def get_station_activity_weather(self, station_ids):
        """Return a DataFrame of activity+weather rows for the given station ids."""
        return pd.read_sql(self._build_activity_query(station_ids), con=self.dao.get_connection())
class MapStations:
    """Renders San Francisco station coordinates into an HTML map page."""

    def __init__(self):
        # Data-access object used to load the station rows.
        self.dao = MySqlDataSouce()

    def sf_locations_string(self):
        """Return a JS-style array literal of ``[lat, long, 'name-id']`` rows.

        Only rows whose city column equals 'San Francisco' are included.
        Row layout (by index): 0=id, 1=city, 2=lat, 3=long, 5=name.
        """
        rows = self.dao.fetch_all_stations()
        entries = [
            "[" + str(r[2]) + ", " + str(r[3]) + ", '" + str(r[5]) + "-" + str(r[0]) + "']"
            for r in rows
            if r[1] == 'San Francisco'
        ]
        return "[['Lat', 'Long', 'Name'],\n" + ",\n".join(entries) + "]"

    def html(self):
        """Return the complete HTML page with the station array embedded."""
        return HTML_FIRST_PART + self.sf_locations_string() + HTML_SECOND_PART
def test_database_add_search_term(self):
    """Inserting a search-history row should return a positive id."""
    database = MySqlDataSouce()
    # Renamed from `id` to avoid shadowing the builtin; assertGreater
    # reports both operands on failure, unlike assertTrue(x > 0).
    row_id = database.add_search_history("TEST", "TE")
    self.assertGreater(row_id, 0)
def test_add_english_deutsch_translation(self):
    """Linking English word 1 to Deutsch word 1 should return a positive value."""
    database = MySqlDataSouce()
    result = database.add_english_deutsch_translation(1, 1)
    # assertGreater reports both operands on failure, unlike assertTrue(x > 0).
    self.assertGreater(result, 0)
def test_find_deutsch_word(self):
    """Looking up a known word/type pair should return a row, not None."""
    found = MySqlDataSouce().find_deutsch_word("TEST", "NOUN")
    self.assertIsNotNone(found)
def test_database_add_deutsch_wod(self):
    """Inserting a Deutsch word should return a positive id."""
    database = MySqlDataSouce()
    # Renamed from `id` (shadowed the builtin); assertGreater gives a
    # more informative failure message than assertTrue(x > 0).
    word_id = database.add_deutsch_word("TEST", "NOUN", "N")
    self.assertGreater(word_id, 0)
def test_database_update_search_status_error_payload(self):
    """Updating status+payload for row 1 should return 0.

    NOTE(review): presumably the return value is an affected-row count or a
    success code — confirm against the DAO contract.
    """
    database = MySqlDataSouce()
    # Renamed from `id` to avoid shadowing the builtin.
    result = database.update_search_status_payload(1, "ERROR", "abcdedfg")
    self.assertEqual(0, result)
def test_database_update_search_status(self):
    """Updating the status of row 1 should return 0.

    NOTE(review): presumably the return value is an affected-row count or a
    success code — confirm against the DAO contract.
    """
    database = MySqlDataSouce()
    # Renamed from `id` to avoid shadowing the builtin.
    result = database.update_search_status(1, "SUCCESS")
    self.assertEqual(0, result)
def __init__(self):
    """Create the MySQL data-access object this instance reads through."""
    # NOTE(review): "MySqlDataSouce" is misspelled (missing the 'r') but the
    # name matches the class defined in the database module — keep in sync.
    self.dao = MySqlDataSouce()
import time

# Language codes and word-type constants used when persisting results.
ENGLISH = "EN"
DEUTSCH = "DE"
NOUN = "NOUN"

# Search-status values written back to the database.
SUCCESS = "SUCCESS"
ERROR_RATE_LIMIT = "ERROR RATE LIMIT"
ERROR_CLIENT_POOL = "ERROR CLIENT POOL"
# NOTE(review): "UKNOWN" is a typo for "UNKNOWN"; kept as-is because other
# code may match this exact status string.
ERROR_UKNOWN = "ERROR UKNOWN"

# Throttling knobs (seconds).
THROTTLE_SECONDS = 10
EXCEPTION_SLEEP_SECONDS = 60
ERROR_COUNT_SLEEP_SECONDS = 500

extractor = WordExtractor()
fetcher = LeoFetcher()
dao = MySqlDataSouce()
parser = LeoParser()

# Read the source word list once up front.
with open('resources/words.txt', 'r') as hall:
    data = hall.read()

words = extractor.get_words_from_text(data)
error_count = 0
for word in words:
    if error_count > 5:
        # Bug fix: the original concatenated str + int here, which raises
        # TypeError the first time this branch executes. Convert explicitly.
        print("Error count reached sleeping " + str(ERROR_COUNT_SLEEP_SECONDS) + " seconds")
        error_count = 0
        time.sleep(ERROR_COUNT_SLEEP_SECONDS)
    print("Searching for word: " + word)
from database import MySqlDataSouce
import pandas as pd

dao = MySqlDataSouce()
results = dao.fetch_all_stations()

# Build a JS-style array literal of San Francisco station coordinates.
# Row layout (by index): 1=city, 2=lat, 3=long, 5=name.
sf_coords = "[['Lat', 'Long', 'Name'],\n"
coord_strs = []
for row in results:
    if row[1] == 'San Francisco':
        coord_strs.append("[" + str(row[2]) + ", " + str(row[3]) + ", '" + str(row[5]) + "']")
sf_coords += ",\n".join(coord_strs) + "]"

# Bug fix: the original used Python 2 `print sf_coords` statements, which are
# a SyntaxError under Python 3; print() with a single argument is valid in both.
print(sf_coords)

df = pd.read_sql("SELECT * FROM station", con=dao.get_connection())
print(df.head())