def scrape_item_by_city_state_and_org(city, state, org, zipcode=None):
    """Look up `org` in `city, state` via Google Places and persist a Muni row.

    Always returns a Muni record: on success it carries the scraped place
    details, on any failure (no results, API/quota error, ...) a row with
    success=False is stored instead so the attempt is not silently lost.

    Args:
        city: City name, e.g. "Austin".
        state: Two-letter state code; expected to be a key of `code_to_state`.
        org: Keyword / organization name to search for.
        zipcode: Optional ZIP code stored verbatim on the record.

    Returns:
        The created Muni model instance (with its `success` flag set).
    """
    time.sleep(3)  # crude rate limiting between Places API calls
    try:
        print("Scraping " + org + " in " + city + ", " + state)
        # Rotate through API keys to spread quota usage across them.
        google_places = GooglePlaces(random.choice(GOOGLE_KEYS))
        results = google_places.query(
            location=city + ", " + state,
            keyword=org,
        )
        # Raises IndexError when the query returned no places; the except
        # below records that as a failed attempt rather than crashing.
        place = results.places[0]
        place.get_details()

        country = 'USA'
        state_code = state
        state_full = code_to_state[state]
        county = ''
        address = place.formatted_address
        url = place.website
        tel = place.local_phone_number
        international_tel = place.international_phone_number
        name = place.name
        lat = place.geo_location['lat']
        lon = place.geo_location['lng']

        print(name)
        print(country)
        print(state_code)
        print(state_full)
        print(address)
        print(url)
        print(tel)
        print(lat)
        print(lon)
        print('\n\n')

        # One timestamp so date_created and last_modified match exactly.
        now = datetime.datetime.utcnow()
        return Muni.create(
            zip_code=zipcode,
            zip_code_string=zipcode,
            org=org,
            city=city,
            address=address,
            county=county,
            country=country,
            url=url,
            tel=tel,
            international_tel=international_tel,
            state_code=state_code,
            state=state_full,
            success=True,
            title=name,
            lat=lat,
            lon=lon,
            email='',
            date_created=now,
            last_modified=now,
        )
    except Exception as e:
        # Deliberate best-effort catch-all: any failure is persisted as an
        # unsuccessful attempt instead of aborting the scrape run.
        print(e)
        now = datetime.datetime.utcnow()
        return Muni.create(
            zip_code=zipcode,
            zip_code_string=zipcode,
            org=org,
            city=city,
            state_code=state,
            # .get() so an unknown state code cannot also blow up the
            # failure path and lose the record entirely.
            state=code_to_state.get(state, state),
            success=False,
            country='USA',
            email='',
            lat='0',
            lon='0',
            address='',
            title='',
            tel='',
            international_tel='',
            url='',
            date_created=now,
            last_modified=now,
        )
def search_places_from_lat_long(lag_lng, radius=3200, keyword=None, types=None):
    """Run a Google Places nearby search around a lat/lng point.

    Args:
        lag_lng: Location to search around, in whatever lat/lng form
            GooglePlaces.query accepts for `lat_lng`. (NOTE: parameter name
            is a historical misspelling of "lat_lng"; kept for callers
            passing it by keyword.)
        radius: Search radius in meters (default 3200, roughly 2 miles).
        keyword: Optional keyword to filter results.
        types: Optional list of place types; defaults to no type filter.

    Returns:
        The GooglePlaces query result object.
    """
    # `types=None` sentinel avoids the shared-mutable-default pitfall of
    # `types=[]`; a fresh empty list is built per call instead.
    google_places = GooglePlaces(settings.GOOGLE_PLACES_API_KEY)
    return google_places.query(
        lat_lng=lag_lng,
        keyword=keyword,
        radius=radius,
        types=types if types is not None else [],
    )