def scrape_top_papertowns_posts():
    """Scrape hot r/papertowns posts and persist new geotagged ones as Town rows.

    Fetches up to ``SCRAPER_LIMIT`` hot submissions. For each submission whose
    title yields coordinates via ``lat_long_from_reddit_title``, inserts a
    ``Town`` row unless a row with the same submission permalink already
    exists. Commits after each insert so a mid-run failure keeps prior work.
    """
    submissions = praw_client.get_subreddit('papertowns').get_hot(
        limit=SCRAPER_LIMIT)
    for submission in submissions:
        lat_and_long = lat_long_from_reddit_title(submission.title)
        if lat_and_long is None:
            # Title has no parseable coordinates; skip this post.
            continue
        # Guard clause above makes the original `else` redundant.
        latitude, longitude = lat_and_long
        # Dedupe by permalink: skip posts already stored.
        existing = session.query(Town).filter(
            Town.submission_url == submission.permalink).count()
        if existing > 0:
            continue
        # Create and persist the new entry.
        town = Town(title=submission.title,
                    image_url=submission.url,
                    submission_url=submission.permalink,
                    longitude=longitude,
                    latitude=latitude)
        session.add(town)
        session.commit()
def get_places_by_bounds(left, top, right, bottom):
    """Return a JSON array of towns whose coordinates fall inside a bounding box.

    Args:
        left: western longitude boundary.
        top: northern latitude boundary.
        right: eastern longitude boundary.
        bottom: southern latitude boundary.

    Returns:
        A JSON string: list of ``Town.to_dict()`` dicts.
    """
    # BUG FIX: the original compared Town.latitude against left/right and
    # Town.longitude against top/bottom. Under the standard map-viewport
    # convention (left/right = longitudes, top/bottom = latitudes) the axes
    # were swapped, returning wrong/empty results for any real viewport.
    # NOTE(review): assumes callers pass viewport edges in that convention —
    # TODO confirm against the route/handler that calls this.
    towns = session.query(Town).filter(
        Town.longitude >= left,
        Town.longitude <= right,
        Town.latitude >= bottom,
        Town.latitude <= top)
    return json.dumps([town.to_dict() for town in towns])
def scrape_top_papertowns_posts():
    """Fetch hot r/papertowns submissions and store unseen geotagged posts.

    Pulls up to ``SCRAPER_LIMIT`` hot posts; any post whose title parses to a
    (latitude, longitude) pair and whose permalink is not yet in the database
    is saved as a new ``Town`` row, committed one at a time.

    NOTE(review): this redefines ``scrape_top_papertowns_posts`` declared
    earlier in this file; the later definition shadows the earlier one —
    consider removing one of the two copies.
    """
    hot_posts = praw_client.get_subreddit('papertowns').get_hot(
        limit=SCRAPER_LIMIT)
    for post in hot_posts:
        coords = lat_long_from_reddit_title(post.title)
        if coords is None:
            continue
        lat, lon = coords
        # Already scraped? Dedupe on the submission permalink.
        already_stored = session.query(Town).filter(
            Town.submission_url == post.permalink).count()
        if already_stored > 0:
            continue
        session.add(Town(title=post.title,
                         image_url=post.url,
                         submission_url=post.permalink,
                         longitude=lon,
                         latitude=lat))
        session.commit()