Example #1
def get_products(products):
    """
        This task is responsible for retrieving product information for each
        product URL and location.

        Input:
            - products = [{
                'url': ...,
                'location': ...
            }]
    """
    scraper = OoshopScraper()  # scraper class defined/imported elsewhere in this module

    for product in products:
        try:
            url = product['url']
            location = product.get('location')  # 'location' is optional
            scraper.get_product_info(product_url=url,
                                     location=location,
                                     save=True)
            # time.sleep(1)  # Throttle so we do not flood the server
        except Exception as e:
            print('Error in get_products task:')
            print(product)
            print(e)
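For context, a minimal sketch of how this task might be invoked, assuming get_products is importable from the module that defines it; the URLs and location value below are placeholders, not real Ooshop pages.

# Hypothetical usage; the URLs and location are placeholders.
get_products([
    {'url': 'https://www.ooshop.example/product/123', 'location': '75001'},
    {'url': 'https://www.ooshop.example/product/456'},  # no 'location' key
])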
Example #2
def simple_update():
    from ooshop.models import Product, Promotion
    from datetime import datetime, timedelta
    scraper = OoshopScraper()
    # First, get incomplete products (no stemmed text extracted yet)
    products = Product.objects.filter(exists=True,
                                      url__isnull=False,
                                      stemmed_text__isnull=True)

    if len(products) > 0:
        scraper.get_product_info(products[0].url, save=True)
        simple_update.apply_async(countdown=2)
    else:
        # Then refresh products not updated within the last 24 hours
        products = Product.objects.filter(exists=True,
                                          url__isnull=False,
                                          updated__lte=datetime.now() -
                                          timedelta(hours=24))
        if len(products) > 0:
            scraper.get_product_info(products[0].url, save=True)
            simple_update.apply_async(countdown=2)
        else:
            # Now fetch multi-product promotion pages that have no content yet
            promotions = Promotion.objects.filter(availability=True,
                                                  type=Promotion.MULTI,
                                                  content__id__isnull=True)
            if len(promotions) > 0:
                scraper.get_product_info(promotions[0].url, save=True)
                simple_update.apply_async(countdown=2)
            else:
                # Nothing left to process; check again in an hour
                simple_update.apply_async(countdown=3600)
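Because the function reschedules itself with simple_update.apply_async(...), it only works if simple_update is registered as a Celery task; the example omits that wiring. A minimal sketch of the assumed setup, with a placeholder app name and broker URL, might look like this:

# Assumed Celery wiring (not shown in the original example); the app name and
# broker URL are placeholders.
from celery import Celery

app = Celery('ooshop', broker='redis://localhost:6379/0')

@app.task
def simple_update():
    ...  # body as in Example #2 above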