Example No. 1
def scrape_all():

    # Call the scrape_all() function defined in the scrape module
    mars_data = scrape.scrape_all()

    # Update the Mongo database using update and upsert=True
    mongo.db.mar_data.update({}, mars_data, upsert=True)

    # Redirect back to home page
    return redirect("/")
Example No. 2
def scraper():

    # Get a reference to the mars_db collection
    mars_mongo = mongo.db.mars_db

    # Run the scrape function
    mars_info = scrape.scrape_all()
    
    # Update the Mongo database using update and upsert=True
    mars_mongo.update({}, mars_info, upsert=True)

    # Redirect back to home page
    return redirect("/")
Example No. 3
	def get(self):
		### Add new entry
		new_xml, new_json = scrape.scrape_all()
		curr_date = scrape.getDate()
		mx = MenuXML(xml=new_xml,json=new_json,key_name=str(curr_date))
		mx.put()

		### Delete old entries
		all_entries = MenuXML.all()
		to_be_deleted = all_entries.filter("__key__ != ", db.Key.from_path('MenuXML', str(curr_date)))
		db.delete(to_be_deleted)

		# Clear memcache entries
		memcache.flush_all()

		# Add to memcache
		if not memcache.add(str(curr_date), mx, 7200): # cache for 7200 seconds (2 hours); the menu updates hourly
			logging.error('Memcache set failed.')
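Examples 3 and 4 are get() methods of first-generation (Python 2) Google App Engine request handlers. They rely on a MenuXML datastore model plus the memcache and logging modules, none of which are shown. A plausible sketch of that model is below; the property types are assumed rather than taken from the examples.

import logging

from google.appengine.api import memcache
from google.appengine.ext import db

class MenuXML(db.Model):
    # Scraped menu stored both as raw XML and as a JSON string;
    # the entity's key_name is the date string returned by scrape.getDate().
    xml = db.TextProperty()
    json = db.TextProperty()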
Example No. 4
	def get(self):
		self.response.headers['Access-Control-Allow-Origin'] = '*'
		self.response.headers['Content-Type'] = 'text/plain'
		curr_date = scrape.getDate()

		# First check if the menu has been cached
		cached_result = memcache.get(str(curr_date))
		if cached_result is None:
			# if the menu hasn't been cached, check the database
			date_key = db.Key.from_path('MenuXML', str(curr_date))
			db_result = MenuXML.get(date_key)

			# if the database is empty, then parse
			if db_result is None or str(db_result.xml) == "None":
				new_xml, new_json = scrape.scrape_all()

				# add to database
				curr_date = scrape.getDate()
				mx = MenuXML(xml=new_xml,json=new_json,key_name=str(curr_date))
				mx.put()

				# add to memcache
				if not memcache.add(str(curr_date), mx, 7200): # cache for 7200 seconds (2 hours); the menu updates hourly
					logging.error('Memcache set failed.')

				### Delete old entries
				all_entries = MenuXML.all()
				to_be_deleted = all_entries.filter("__key__ != ", db.Key.from_path('MenuXML', str(curr_date)))
				db.delete(to_be_deleted)

				self.response.write(str(new_json))

			# db_result is valid
			else: 
				# add to memcache
				if not memcache.add(str(curr_date), db_result, 7200): # cache for 7200 seconds (2 hours); the menu updates hourly
					logging.error('Memcache set failed.')
				# return xml
				self.response.write(str(db_result.json))

		# otherwise, return the cached data
		else:
			self.response.write(str(cached_result.json))
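Because the get() methods in Examples 3 and 4 belong to request handler classes that are not shown, here is a rough sketch of how such handlers might be declared and routed with webapp2. The class names and URL paths are hypothetical.

import webapp2

class RefreshMenuHandler(webapp2.RequestHandler):
    def get(self):
        # Body as in Example 3: scrape, store, and re-cache the menu.
        pass

class MenuHandler(webapp2.RequestHandler):
    def get(self):
        # Body as in Example 4: serve the menu from memcache, the datastore, or a fresh scrape.
        pass

app = webapp2.WSGIApplication([
    ('/refresh', RefreshMenuHandler),
    ('/menu', MenuHandler),
], debug=True)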
Example No. 5
def scraper():
    mars = mongo.db.mars
    mars_data = scrape_all()
    mars.update({}, mars_data, upsert=True)
    return redirect("/", code=302)
Example No. 6
def scrapper():
    mars = mongo.db.mars
    mars_data = scrape.scrape_all()
    mars.update({}, mars_data, upsert=True)
    return "Scraping Successful"
Example No. 7
def run_scrape():
    # Use a name other than `scrape` so the view does not shadow the imported scrape module.
    scrape.scrape_all()
    return redirect("/")
Example No. 8
def run_scrape():
    # Use a name other than `scrape` so the view does not shadow the imported scrape module.
    mars = mongo.db.mars
    mars_data = scrape.scrape_all()
    mars.update({}, mars_data, upsert=True)
    return "Scraping Complete!"
Example No. 9
def run_one_user(user, start_date, end_date):
    scrape_all(user, start_date, end_date)
    get_all_metadata(user)
    parse_json(user)
    clean_up_files(user)
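Example No. 9 chains scrape_all with the later pipeline stages for a single user and date range. A hypothetical driver that applies the same pipeline to several users over one range could look like the sketch below; the user names and date format are purely illustrative.

def run_many_users(users, start_date, end_date):
    # Run the full scrape / metadata / parse / cleanup pipeline once per user.
    for user in users:
        run_one_user(user, start_date, end_date)

run_many_users(["alice", "bob"], "2021-01-01", "2021-01-31")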
Example No. 10
def scrape_view(request):
    if request.user.is_authenticated() and request.user.is_staff:
        scrape.scrape_all()
        return redirect('admin:index')
    return HttpResponseForbidden()
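Example No. 10 is a Django view (the is_authenticated() call suggests Django 1.x, where it was still a method) that only lets authenticated staff trigger the scrape. A sketch of the URLconf entry that would expose it is below; the module path, URL pattern, and name are assumptions.

from django.conf.urls import url

from myapp import views  # module containing scrape_view; path assumed

urlpatterns = [
    url(r'^scrape/$', views.scrape_view, name='scrape'),
]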