def __init__(self, recreate=False):
    """Set up the population table and preload major-city reference data.

    Args:
        recreate: when True, drop and rebuild the population table.
    """
    datamodel = Datamodel()
    self.table, self.table_config = datamodel.population()
    self.postgres = Postgresql(user_name='postgres',
                               password='******',
                               host='localhost',
                               port='5432',
                               db='TestProject')
    # Bug fix: `recreate` was previously hard-coded to False here, silently
    # ignoring the caller's request to rebuild the table.
    self.postgres.initialize_table(self.table,
                                   recreate=recreate,
                                   **self.table_config)
    self.googlegeo = GoogleGeo()
    # myan: only get major cities data once per request
    self.major_cities_postgres = Postgresql(user_name='postgres',
                                            password='******',
                                            host='localhost',
                                            port='5432',
                                            db='TestProject')
    self.major_cities = self.major_cities_postgres.get(
        "select * from TestMajorCities")
    self.all_states = self.major_cities['state'].values
    self.all_cities = self.major_cities['city'].values
    self.all_lats = self.major_cities['lat'].values
    self.all_lngs = self.major_cities['lng'].values
    self.all_population = self.major_cities['population'].values
def push_major_cities():
    """Fetch 2013 ACS3 place populations from the US Census API and persist
    every major city (population >= 100000) into the TestMajorCities table.
    """
    response = requests.get(
        'http://api.census.gov/data/2013/acs3?get=NAME,B01001_001E&for=place:*'
    )
    cities, _ = _parse_city_population(response.json())
    # myan: major city should have more than 100000
    cities = cities.loc[cities.population >= 100000]
    cities = _get_city_info(cities)
    schema = {
        'place_id': 'TEXT',
        'city': 'TEXT',
        'state': 'TEXT',
        'population': 'INT',
        'lat': 'FLOAT',
        'lng': 'FLOAT',
    }
    postgres = Postgresql(user_name='postgres',
                          password='******',
                          host='localhost',
                          port='5432',
                          db='TestProject')
    postgres.initialize_table('TestMajorCities',
                              fields_types=schema,
                              primary_key='place_id',
                              not_null_fields=['place_id', 'city', 'state'],
                              recreate=False)
    postgres.put_dataframe(cities, schema)
def test_parse_list_values(self):
    """parse_values_list should render only the requested fields, in order,
    as a comma-joined list of SQL value tuples."""
    postgres = Postgresql()
    rows = [
        {
            'gsId': 100,
            'name': 'Max',
            'zip_code': 123,
            'state': 'CA',
            'gsRating': 3.5
        },
        {
            'gsId': 101,
            'name': 'Tez',
            'zip_code': 123,
            'state': 'CA',
            'gsRating': 8.5
        },
    ]
    field_types = {
        'gsId': 'INT',
        'zip_code': 'INT',
        'state': 'TEXT',
        'name': 'TEXT',
        'gsRating': 'FLOAT'
    }
    result = postgres.parse_values_list(rows,
                                        field_types,
                                        field_list=['gsId', 'name', 'gsRating'])
    assert result == "(100, 'Max', 3.5),(101, 'Tez', 8.5)"
def __init__(self, key=None, recreate=False):
    """Initialize the GreatSchools handler and its backing table.

    Args:
        key: GreatSchools API key; looked up via
            _get_great_schools_api_key() when None.
        recreate: when True, drop and rebuild the great-schools table.
            Added (default False, preserving previous behavior) for
            consistency with the other data handlers, which already
            expose a `recreate` flag.
    """
    if key is None:
        self.api_key = _get_great_schools_api_key()
    else:
        self.api_key = key
    # myan: initialize postgresql
    datamodel = Datamodel()
    self.table, self.table_config = datamodel.great_schools()
    self.postgres = Postgresql(user_name='postgres',
                               password='******',
                               host='localhost',
                               port='5432',
                               db='TestProject')
    self.postgres.initialize_table(self.table,
                                   recreate=recreate,
                                   **self.table_config)
def test_make_schema_string(self):
    """make_schema_string should emit a parenthesised column list with
    PRIMARY KEY / NOT NULL constraints attached to the right fields."""
    postgres = Postgresql()
    field_types = {
        'gsid': 'INT',
        'zip_code': 'INT',
        'state': 'TEXT',
        'name': 'TEXT',
        'gsrating': 'FLOAT'
    }
    result = postgres.make_schema_string(
        field_types,
        primary_key='gsid',
        not_null_fields=['gsid', 'zip_code', 'state', 'name'])
    expected = ('(gsid INT PRIMARY KEY NOT NULL,'
                'zip_code INT NOT NULL,'
                'state TEXT NOT NULL,'
                'name TEXT NOT NULL,'
                'gsrating FLOAT)')
    assert result == expected
class ZillowDataHandler:
    # Wires up the Postgres table for the Zillow zipcode time series and
    # (eventually) downloads/cleans the raw Zillow data.

    def __init__(self, table=None, recreate=False):
        # Resolve table name and config from the datamodel. An explicit
        # `table` argument overrides the default name but reuses the
        # default zipcode_timeseries config.
        datamodel = Datamodel()
        if table is None:
            table, table_config = datamodel.zipcode_timeseries()
        else:
            _, table_config = datamodel.zipcode_timeseries()
        self.time_series_postgres = self._initialize_postgres(
            (table, table_config), recreate=recreate)

    # NOTE(review): the tuple parameter `(table, config)` is Python 2-only
    # syntax (removed by PEP 3113); this will not compile under Python 3.
    def _initialize_postgres(self, (table, config), recreate=False):
        # Connect and ensure the table exists, optionally rebuilding it.
        postgres = Postgresql(user_name='postgres',
                              password='******',
                              host='localhost',
                              port='5432',
                              db='TestProject')
        postgres.initialize_table(table, recreate=recreate, **config)
        return postgres

    def get_data_from_zillow(self):
        # TODO: complete downloading data from web part
        pass

    # NOTE(review): this method appears truncated in this view — the loop
    # body ends mid-statement; presumably it collects matching columns into
    # `to_drop` and drops them, but the remainder is not visible here.
    def _drop_columns(self, data, all_columns=('RegionID', 'City', 'Metro', 'SizeRank')):
        # Collect the metadata columns that are actually present in `data`.
        to_drop = []
        for column in all_columns:
            if column in data.columns:
class Plotter:
    """Selects zipcodes within a price band from the time-series table,
    ranks them by gross rental yield, and enriches the top candidates
    with school-rating and population data."""

    def __init__(self,
                 min_price=150000,
                 max_price=300000,
                 top_percentage=0.25,
                 top_max_num_entries=30):
        """Connect to the three backing tables and clear the caches.

        Args:
            min_price: lower bound on median price for candidate zipcodes.
            max_price: upper bound on median price for candidate zipcodes.
            top_percentage: fraction of ranked candidates kept as "top".
            top_max_num_entries: hard cap on the number of top candidates.
        """
        self.MIN_PRICE = min_price
        self.MAX_PRICE = max_price
        self.TOP_PERCENTAGE = top_percentage
        self.TOP_MAX_NUM_ENTRIES = top_max_num_entries
        datamodel = Datamodel()
        self.time_series_postgres = self._initialize_postgres(
            datamodel.zipcode_timeseries())
        self.population_postgres = self._initialize_postgres(
            datamodel.population())
        self.great_schools_postgres = self._initialize_postgres(
            datamodel.great_schools())
        self.zipcode_timeseries = None
        self.top_zipcodes_timeseries = None
        self.rest_zipcodes_timeseries = None
        self.top_zipcodes_school_data = None
        self.top_zipcodes_population_data = None
        self.all_months_timeseries = None

    def reset(self):
        """Clear all cached query results."""
        self.zipcode_timeseries = None
        self.top_zipcodes_timeseries = None
        self.rest_zipcodes_timeseries = None
        self.top_zipcodes_school_data = None
        self.top_zipcodes_population_data = None
        self.all_months_timeseries = None

    def _get_time_series_data(self, year_month=201510):
        """Load candidate zipcodes for `year_month`, rank by gross yield,
        and split into top/rest partitions."""
        suitable_states = self.time_series_postgres.get(
            "select * from {table} where year_month={year_month} and median_price < {max_price} and median_price > {min_price}"
            .format(table=self.time_series_postgres.table,
                    year_month=year_month,
                    max_price=self.MAX_PRICE,
                    min_price=self.MIN_PRICE))
        # Gross yield: annual rent as a percentage of the purchase price.
        suitable_states[
            'gross_yield_pct'] = suitable_states.median_rent * 12 / suitable_states.median_price * 100
        # Bug fix: DataFrame.sort was removed in pandas 0.20;
        # sort_values is the supported equivalent.
        suitable_states = suitable_states.sort_values('gross_yield_pct',
                                                      ascending=False)
        top_index = min(int(len(suitable_states) * self.TOP_PERCENTAGE),
                        self.TOP_MAX_NUM_ENTRIES)
        self.zipcode_timeseries = suitable_states
        self.top_zipcodes_timeseries = suitable_states[:top_index]
        self.rest_zipcodes_timeseries = suitable_states[top_index:]

    def _get_population_data(self):
        """Fetch population data for the top zipcodes, loading the time
        series first if it has not been loaded yet."""
        if self.top_zipcodes_timeseries is None:
            self._get_time_series_data()
        p = Population(recreate=False)
        self.top_zipcodes_population_data = p.run(
            addresses=self.top_zipcodes_timeseries.zip_code.values)

    def _get_top_zipcodes_school_data(self):
        """Fetch GreatSchools ratings for every (state, zipcode) pair in
        the top partition and average the ratings per zipcode."""
        all_states = self.top_zipcodes_timeseries.state.values
        all_zipcodes = self.top_zipcodes_timeseries.zip_code.values
        unique_states = self.top_zipcodes_timeseries.state.unique()
        # Renamed from `requests` to stop shadowing the requests module.
        user_requests = []
        for state in unique_states:
            unique_zipcodes = np.unique(all_zipcodes[all_states == state])
            for zip_code in unique_zipcodes:
                user_requests.append(
                    dict(db_configs=dict(
                        postgres=self.great_schools_postgres,
                        query=
                        "select * from {table} where state='{state}' and zip_code={zip_code};"
                        .format(table=self.great_schools_postgres.table,
                                state=state,
                                zip_code=zip_code)),
                         api_configs=dict(api=GreatSchools,
                                          api_key=None,
                                          api_args=dict(state=state,
                                                        zip_code=zip_code,
                                                        limit=20))))
        mission_control = MissionControl()
        gs_data = mission_control.request_data(user_requests=user_requests)
        # Bug fix: DataFrame.append (removed in pandas 2.0) in a loop is
        # replaced by a single pd.concat. Assumes each entry is a DataFrame,
        # as the original append-without-ignore_index implied — confirm
        # against MissionControl.request_data.
        frames = list(gs_data)
        gs_data_df = pd.concat(frames) if frames else pd.DataFrame()
        self.top_zipcodes_school_data = pd.DataFrame(
            gs_data_df.groupby('zip_code')['gsrating'].mean())
        self.top_zipcodes_school_data[
            'zip_code'] = self.top_zipcodes_school_data.index

    def _initialize_postgres(self, table_config):
        """Create a Postgres handle and ensure the table exists.

        Args:
            table_config: (table_name, config_dict) pair as returned by the
                Datamodel accessors. (Was a Python 2 tuple parameter,
                removed by PEP 3113; unpacked explicitly for Python 3
                compatibility — call sites are unchanged.)
        """
        table, config = table_config
        postgres = Postgresql(user_name='postgres',
                              password='******',
                              host='localhost',
                              port='5432',
                              db='TestProject')
        postgres.initialize_table(table, recreate=False, **config)
        return postgres