def scrape():
    """Scrape Mars data and hemisphere images and upsert them into Mongo.

    Returns a plain success string for the browser.
    """
    mars = mongo.db.mars
    hemispheres = mongo.db.hemispheres
    # scrape_all() returns (mars_data, hemi_list); the original called it
    # twice and threw the first (expensive) result away — call it once.
    mars_data, hemi_list = scraping.scrape_all()
    # upsert=True creates the document on the first run.
    mars.update({}, mars_data, upsert=True)
    # One document per hemisphere, keyed by title.
    for hemi in hemi_list:
        hemispheres.update_one(
            {'title': hemi.get('title')},
            {'$set': {'img_url': hemi.get('img_url')}},
            upsert=True,
        )
    return "Scraping Successful!"
def populate(self):
    """Scrape the website and insert every scraped song into the database."""
    for scraped_song in scraping.scrape_all():
        self.insert_song(scraped_song)
def scrape():
    """Run the full scrape and upsert the result into the mars collection."""
    collection = mongo.db.mars
    latest = scraping.scrape_all()
    # upsert=True tells Mongo to create the document if one doesn't exist yet.
    collection.update({}, latest, upsert=True)
    return "Scraping Successful!"
def scrape():
    """Scrape, store the data, pause briefly, then redirect back home."""
    collection = mongo.db.mars
    fresh_data = scraping.scrape_all()
    collection.update({}, fresh_data, upsert=True)
    print("Scraping Successful!")
    # Short pause before sending the browser back to the index page.
    time.sleep(1)
    return redirect("/")
def scrape():
    """Gather fresh Mars data and push it into the mars collection."""
    collection = mongo.db.mars
    fresh = scraping.scrape_all()
    # Upsert so the very first scrape creates the document.
    collection.update({}, fresh, upsert=True)
    # Tell the caller the scrape finished.
    return "Scraping Successful!"
def scrape():
    """Scrape new Mars data, store it, and render the confirmation page."""
    collection = mongo.db.mars
    latest = scraping.scrape_all()
    collection.update({}, latest, upsert=True)
    # Confirmation template receives the collection handle as `mars`.
    return render_template("return.html", mars=collection)
def scrape():
    """Scrape fresh Mars data and upsert it into the mars collection.

    Returns a plain success string for the browser.
    """
    mars = mongo.db.mars
    mars_data = scraping.scrape_all()
    # upsert=True creates the document if one doesn't already exist.
    # (Removed a duplicate update call that sat unreachably after the return.)
    mars.update({}, mars_data, upsert=True)
    return "Scraping Successful!"
def scrape():
    """Run the imported scraping function and update the mars collection."""
    collection = mongo.db.mars
    # Execute the scrape defined in scraping.py.
    scraped = scraping.scrape_all()
    # Upsert the scraped data into the collection.
    collection.update({}, scraped, upsert=True)
    return "Scraping Successful!"
def scrape():
    """Scrape Mars facts and hemisphere data; upsert each into its collection."""
    facts_coll = mongo.db.mars
    # Hemisphere collection added for the challenge portion.
    hemi_coll = mongo.db.marshemi
    facts, hemi_data = scraping.scrape_all()
    facts_coll.update({}, facts, upsert=True)
    hemi_coll.update({}, hemi_data, upsert=True)
    return "Scraping Successful!"
def scrape():
    """Scrape, log the result, store it, and bounce back to the index."""
    collection = mongo.db.mars
    scraped = scraping.scrape_all()
    print(scraped)
    collection.update({}, scraped, upsert=True)
    # Redirect home so the refreshed data is shown immediately.
    return redirect('/', code=302)
def scrape(): # Create and define a function mars = mongo.db.mars # Define a variable that points to Mongo DB mars_data = scraping.scrape_all() # Create a variable to hold scraped data mars.update( {}, mars_data, upsert=True ) # Update the database.Use data in mars_data,Ask Mongodb to create a new document return "Scraping Successful!" # Let us know if scraping is successful
def scrape():
    """Scrape Mars data, log it, store it, and render the scraping page."""
    collection = mongo.db.mars
    mars_data = scraping.scrape_all()
    print(f'Destination mongo mars .... {mars_data}')
    collection.update({}, mars_data, upsert=True)
    return render_template("scraping.html")
def scrape():
    """Scrape core Mars data plus hemisphere images and store the merged dict."""
    collection = mongo.db.mars
    combined = scraping.scrape_all()
    # Merge the hemisphere-image results into the main scrape dict.
    combined.update(scraping.hemisphere_image())
    collection.update({}, combined, upsert=True)
    return "Scraping Successful!"
def scrape():
    """Scrape facts and images, upsert both collections, render the page."""
    facts_coll = mongo.db.mars
    images_coll = mongo.db.mars_images
    facts, images = scraping.scrape_all()
    facts_coll.update({}, facts, upsert=True)
    images_coll.update({}, images, upsert=True)
    return render_template("scraping.html")
def scrape():
    """Scrape Mars data (via scraping.py, exported from Jupyter) and store it."""
    # Points at the mars collection in the database.
    collection = mongo.db.mars
    # Run the scrape defined in the exported scraping.py module.
    scraped = scraping.scrape_all()
    # {} matches the (single) existing document; upsert=True creates one
    # if it doesn't already exist.
    collection.update({}, scraped, upsert=True)
    return "Scraping Successful!"
def scrape():
    """Scrape fresh data, upsert it, and redirect back to the home page."""
    collection = mongo.db.mars
    scraped = scraping.scrape_all()
    collection.update({}, scraped, upsert=True)
    # Send the browser back to the index to show the refreshed data.
    return redirect("/", code=302)
def scrape():
    """Scrape new Mars data and upsert it into the Mongo collection."""
    # Collection handle in Mongo.
    collection = mongo.db.mars
    # Freshly scraped data.
    scraped = scraping.scrape_all()
    # Upsert into the collection.
    collection.update({}, scraped, upsert=True)
    return "Scraping Successful!"
def scrape():
    """Scrape, upsert into Mongo, then redirect to the index page."""
    collection = mongo.db.mars
    scraped = scraping.scrape_all()
    collection.update({}, scraped, upsert=True)
    return redirect('/', code=302)


if __name__ == "__main__":
    app.run()
def scrape():
    """Scrape the latest Mars data and update the Mongo database."""
    # Handle on the Mongo collection.
    collection = mongo.db.mars
    # Hold the newly scraped data.
    fresh = scraping.scrape_all()
    # Push the new data into the database.
    collection.update({}, fresh, upsert=True)
    return "Scraping Successful!"
def scrape():
    """Scrape Mars data, store it, and render the success template."""
    collection = mongo.db.mars
    scraped = scraping.scrape_all()
    # upsert=True: create the document if one doesn't already exist.
    collection.update({}, scraped, upsert=True)
    return render_template("scraping_successful.html")
def scrape():
    """Run scraping.py and upsert the result into the mongo collection."""
    # Point at the mongo collection.
    collection = mongo.db.mars
    # Execute the scrape from scraping.py.
    scraped = scraping.scrape_all()
    # Upsert: create the document if one doesn't exist already.
    collection.update({}, scraped, upsert=True)
    return "Scraping Successful!"
def scrape():
    """Scrape every source, log the result, and update the mars collection."""
    collection = mongo.db.mars
    # Gather data from all the scraped sources.
    scraped = scraping.scrape_all()
    print(scraped)
    # Upsert into the mars collection.
    collection.update({}, scraped, upsert=True)
    return "Scraping Successful"
def scrape():
    """Scrape fresh Mars data and upsert it into Mongo."""
    collection = mongo.db.mars
    scraped = scraping.scrape_all()
    collection.update({}, scraped, upsert=True)
    return "Scraping Successful!"


if __name__ == "__main__":
    app.run()
def scrape():
    """Run scraping.py's scrape_all and push the result into Mongo."""
    # Mongo collection handle.
    collection = mongo.db.mars
    # Perform the imported scrape.
    scraped = scraping.scrape_all()
    # Update (or create) the document in the mongo database.
    collection.update({}, scraped, upsert=True)
    return "Scraping Successful!"
def scrape():
    """Scrape new data, update the database, then redirect home."""
    # Collection in the database.
    collection = mongo.db.mars
    # Newly scraped data.
    fresh = scraping.scrape_all()
    # Upsert into the database.
    collection.update({}, fresh, upsert=True)
    # Back to the home page after a successful scrape.
    return redirect('/', code=302)
def scrape():
    """Refresh the mars collection from a new scrape and show the index."""
    # The mars collection in MongoDB.
    collection = mongo.db.mars
    # Most up-to-date data from the scrape.
    fresh = scraping.scrape_all()
    # Replace the stored document with the new data.
    collection.update({}, fresh, upsert=True)
    # Display the index page with the refreshed data.
    return redirect(url_for('index'))
def scrape():
    """Scrape Mars data and upsert it into the collection."""
    # Where the data lives.
    collection = mongo.db.mars
    # Pull fresh data.
    scraped = scraping.scrape_all()
    # {} matches the existing document; upsert creates one if missing.
    collection.update({}, scraped, upsert=True)
    return "Scraping Successful!"
def scrape():
    """Scrape fresh Mars data and upsert it into the mars collection.

    Returns a plain success string for the browser.
    """
    mars = mongo.db.mars
    mars_data = scraping.scrape_all()
    # Removed a leftover debug banner print ("YOUR IN THE SCRAPE FUNCTION")
    # that spammed the server log on every request.
    mars.update({}, mars_data, upsert=True)
    return "Scraping Successful!"
def scrape():
    """Scrape fresh Mars data, upsert it, and redirect to the index page."""
    mars = mongo.db.mars
    mars_data = scraping.scrape_all()
    # upsert=True creates the document if one doesn't already exist.
    # (Removed an unreachable duplicate update/return pair after this return.)
    mars.update({}, mars_data, upsert=True)
    return redirect('/', code=302)
def scrape():
    """Scrape new Mars data, store it, and go back to the homepage."""
    # The Mongo collection for this app.
    collection = mongo.db.mars
    # Hold the newly scraped data.
    fresh = scraping.scrape_all()
    # Update (or create) the stored document.
    collection.update({}, fresh, upsert=True)
    # Navigate back to the homepage to show the updated content.
    return redirect('/', code=302)