Example no. 1
0
def scrape():
    """Run the NASA scraper and store its output in MongoDB, then go home.

    Calls scrape_nasa.scrape_info() (defined elsewhere in this project) and
    upserts the returned dictionary as the single document of the
    ``collection`` collection, then redirects the browser to "/".
    """
    # Run the scrape function  *updated 10/22/2020
    nasa_data = scrape_nasa.scrape_info()

    # Collection.update() is deprecated in PyMongo 3.x and removed in 4.x.
    # replace_one with upsert=True keeps the original semantics: the call had
    # no update operators, so it replaced the whole matched document.
    mongo.db.collection.replace_one({}, nasa_data, upsert=True)

    # Redirect back to the home page
    return redirect("/")
Example no. 2
0
def scrape():
    """Scrape NASA data and upsert it into the Mongo ``news`` collection.

    App.py runs under Flask and is the interface between the app and the
    Internet. A button on index.html routes to "/scrape"; this handler can be
    reached by that click or by entering the URL manually:

        1. A function is stored in a program called scrape_nasa.py.
        2. scrape_nasa.py embeds a function named scrape_info.
        3. scrape_info does the scraping using Splinter + BeautifulSoup.
        4. It returns a dictionary of the key variables of interest
           (``nasa_data``).
        5. ``nasa_data`` is then upserted into the MongoDB collection
           ``Nasa.news``.
        6. The Nasa database was defined in MongoDB to contain collections of
           data; collections can be named e.g. Nasa.collection, Nasa.NewsGroup
           or Nasa.news — a single Mongo database may hold several.
        7. The scraped data is stored via ``mongo.db.news...``; the scraping
           itself is coded in scrape_nasa.py.
        8. Control is then redirected back to where we started.

                                        Piruz Alemi, Jan 27th, 2020
    """
    # NOTE: this is a function call into scrape_nasa.py, not inline code.
    nasa_data = scrape_nasa.scrape_info()

    # Collection.update() is deprecated in PyMongo 3.x and removed in 4.x.
    # replace_one with upsert=True keeps the original behavior: with no update
    # operators in nasa_data, update() replaced the whole matched document.
    # {} is the filter (match any one document); nasa_data is the replacement.
    mongo.db.news.replace_one({}, nasa_data, upsert=True)

    # The mongo shell can be run without any command-line options to connect
    # to a MongoDB instance on localhost at the default port 27017.

    # Redirect back to the home page
    return redirect("/")
Example no. 3
0
def scrape():
    """Run the Mars scraper, upsert the result into MongoDB, redirect home.

    Calls scrape_nasa.scrape_info() (defined elsewhere in this project) and
    stores the returned dictionary as the single document of the
    ``collection`` collection before redirecting to "/".
    """
    mars_data = scrape_nasa.scrape_info()

    # Collection.update() is deprecated in PyMongo 3.x and removed in 4.x;
    # replace_one preserves the original replace-the-whole-document semantics.
    mongo.db.collection.replace_one({}, mars_data, upsert=True)

    return redirect("/")
Example no. 4
0
def scrape():
    """Run the NASA scraper, upsert the result into MongoDB, redirect home.

    Calls scrape_nasa.scrape_info() (defined elsewhere in this project) and
    stores the returned dictionary as the single document of the ``nasa``
    collection before redirecting to "/".
    """
    nasa_data = scrape_nasa.scrape_info()

    # Collection.update() is deprecated in PyMongo 3.x and removed in 4.x;
    # replace_one preserves the original replace-the-whole-document semantics.
    mongo.db.nasa.replace_one({}, nasa_data, upsert=True)

    return redirect("/")