Example #1
 def nested_delay_tile_serialization_test(self):
     tile = Tile.objects.get(pk=10)
     with mock.patch('apps.assets.models.Tile.update_ir_cache',
                     mock.Mock(return_value=('', False))) as mocked_handler:
         with delay_tile_serialization():
             with delay_tile_serialization():
                 tile.save()  # triggers signal, which is disabled
                 self.assertEquals(mocked_handler.call_count, 0)
             self.assertEquals(mocked_handler.call_count, 0)
         self.assertEquals(mocked_handler.call_count, 1)
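The test above exercises delay_tile_serialization as a re-entrant context manager: tile serialization is suppressed while any block is active and flushed exactly once when the outermost block exits. As a rough sketch of that behavior (the names and structure below are illustrative assumptions, not the actual apps.assets implementation):

# Illustrative sketch only -- not the real apps.assets code.
from contextlib import contextmanager

_delay_depth = 0        # number of active (possibly nested) delay blocks
_pending_tiles = set()  # tiles whose serialization was deferred

@contextmanager
def delay_tile_serialization_sketch():
    global _delay_depth
    _delay_depth += 1
    try:
        yield
    finally:
        _delay_depth -= 1
        if _delay_depth == 0:
            # Only the outermost block flushes, so each deferred tile
            # is serialized exactly once.
            while _pending_tiles:
                _pending_tiles.pop().update_ir_cache()

def tile_saved_sketch_handler(sender, instance, **kwargs):
    # post_save-style handler: defer serialization while a delay block is active.
    if _delay_depth:
        _pending_tiles.add(instance)
    else:
        instance.update_ir_cache()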
Example #2
 def tile_m2m_changed_add_content_delay_tile_serialization_test(self):
     # sufficient to demonstrate signal is disabled / re-enabled
     # all possible inputs covered by signal test
     tile = Tile.objects.get(pk=10)
     content = Content.objects.get(pk=6)
     with mock.patch('apps.assets.models.Tile.update_ir_cache',
                     mock.Mock(return_value=('', False))) as mocked_handler:
         with delay_tile_serialization():
             tile.content.add(content)  # triggers signal, which is disabled
             self.assertEquals(mocked_handler.call_count, 0)
         self.assertEquals(mocked_handler.call_count, 1)
Example #3
 def productimage_saved_delay_tile_serialization_test(self):
     tile = Tile.objects.get(pk=10)
     pro = Product.objects.get(pk=13)
     product_image = ProductImage.objects.get(pk=11)
     tile.products.add(pro)
     with mock.patch('apps.assets.models.Tile.update_ir_cache',
                     mock.Mock(return_value=('', False))) as mocked_handler:
         with delay_tile_serialization():
             product_image.save()  # triggers signal, which is disabled
             self.assertEquals(mocked_handler.call_count, 0)
         self.assertEquals(mocked_handler.call_count, 1)
Example #4
def update_dominant_color(tiles):
    """
    Updates the dominant color of all images in the provided tiles.
    Chooses non-product-shot default images for all provided tiles.
    """
    def get_resource_url(public_id):
        return "https:{}/resources/image/upload/{}?colors=1".format(
            settings.CLOUDINARY_API_URL, public_id)

    rescrape = []  # list of product urls that must be re-scraped

    with delay_tile_serialization():
        with requests.Session() as s:
            s.auth = requests.auth.HTTPBasicAuth(
                settings.CLOUDINARY_API_KEY, settings.CLOUDINARY_API_SECRET)
            for i, t in enumerate(tiles):
                try:
                    pis = t.product.product_images.all()
                    print "{} {}: getting dominant_color for {} images".format(
                        i, t, pis.count())
                except AttributeError:
                    print "{} {}: no product".format(i, t)
                    continue
                if not len(t.product.product_images.all()):
                    print "{}: no product images".format(t)
                    continue
                for j, pi in enumerate(pis):
                    # Download image information from cloudinary
                    public_id = get_public_id(pi.url)
                    url = get_resource_url(public_id)
                    data = s.get(url)
                    result = json.loads(data.text)
                    if "error" in result:
                        print "\t{} Error: {}".format(
                            j, result['error']['message'])
                        rescrape.append(t.product.url)
                        continue
                    else:
                        pi.dominant_color = result['colors'][0][0]
                        pi.save()
                        print "\t{} {}".format(j, pi.dominant_color)
                # Now that product images have dominant color, update default image
                t.product.choose_lifestyle_shot_default_image()
                if t.product.default_image.is_product_shot:
                    print "Default image search failed. Chose first"
                    t.attributes['colspan'] = 1
                    t.save()

    if len(rescrape):
        print "Rescrape these product urls with {'refresh-images': True}:"
        pprint(rescrape)
    return rescrape
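For reference, the "colors" payload that the loop above parses from the Cloudinary resource endpoint looks roughly like the following; the values shown here are hypothetical:

# Hypothetical response shape, consistent with the parsing above.
result = {
    "colors": [              # ordered [hex color, percentage] pairs
        ["#1A2B3C", 41.2],
        ["#C8D1D6", 22.7],
    ],
}
dominant_color = result['colors'][0][0]  # most prominent color, "#1A2B3C"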
Example #5
 def content_m2m_changed_similar_products_delay_tile_serialization_test(
         self):
     # sufficient to demonstrate signal is disabled / re-enabled
     # all possible inputs covered by signal test
     tile = Tile.objects.get(pk=10)
     pro = Product.objects.get(pk=3)
     pro2 = Product.objects.get(pk=12)
     tile.products.add(pro)
     with mock.patch('apps.assets.models.Tile.update_ir_cache',
                     mock.Mock(return_value=('', False))) as mocked_handler:
         with delay_tile_serialization():
             pro.similar_products.add(pro2)  # triggers signal, which is disabled
             self.assertEquals(mocked_handler.call_count, 0)
         self.assertEquals(mocked_handler.call_count, 1)
Example #6
    def _update_or_create_similar_product(self, data, store):
        """
        Try to find a similar product based on SKU, then update it with data.
        If created, generate a product image.
        """
        try:
            product = Product.objects.get(sku=data['SKU'], store=store)
        except Product.MultipleObjectsReturned:
            products = Product.objects.filter(sku=data['SKU'], store=store)
            product = Product.merge_products(products)
        except Product.DoesNotExist:
            product = Product(sku=data['SKU'],
                              url=data['THIRDPARTYCATEGORY'],
                              name=data['NAME'],
                              store=store)

        # Update
        self._update_product_cj_fields(product, data)

        with delay_tile_serialization():
            product.save()

            if not product.product_images.count():
                # Add one product image
                # TODO: utilize cloudinary
                product_image_url = data['ARTIST']
                product_image = ProductImage(
                    product=product,
                    url=product_image_url,
                    original_url=product_image_url,
                    file_type=get_filetype(product_image_url),
                    attributes={
                        'sizes': {
                            'master': {
                                'width': 430,
                                'height': 430,
                            },
                        },
                    })
                product_image.save()
                product.default_image = product_image
                product.save()

        return product
Example #7
    def handle(self, url_slug, **options):
        page = Page.objects.get(url_slug=url_slug)
        store = page.store
        opts = {
            'similar_products': options['similar_products'],
        }
        results = {
            'logging/errors': {},
            'logging/items dropped': {
                'match failed': []
            },
            'logging/items out of stock': [],
            'logging/items updated': [],
            'logging/new items': [],
            'log': FakeLog(),
        }

        # Initialize Product Data Feed
        datafeed = find_datafeed(store.slug)()
        datafeed.load(opts)

        products = page.feed.get_all_products()  # returns QuerySet
        print u"Found {} products for {}".format(products.count(), url_slug)

        # Find product in product data feed & update
        with delay_tile_serialization():
            for product in products.iterator():
                match = field = val = None
                try:
                    match = datafeed.lookup_product(product)
                except Exception as e:
                    errors = results['logging/errors']
                    msg = '{}: {}'.format(e.__class__.__name__, e)
                    items = errors.get(msg, [])
                    items.append(product.url)
                    errors[msg] = items
                    results['logging/errors'] = errors
                    print 'logging/errors: {}'.format(msg)
                else:
                    data, field = match
                    if data:
                        # Found matching product
                        print u"{} match: {}".format(field, product.url)

                        datafeed.update_product(product, data)
                        product.save()

                        if product.in_stock:
                            results['logging/items updated'].append(
                                product.url)
                        else:
                            results['logging/items out of stock'].append(
                                product.url)
                    else:
                        print u"\tMatch FAILED: {} {}".format(
                            product.name.encode('ascii', errors='ignore'),
                            product.url)
                        # Out of stock items often just disappear from the feeds
                        if product.in_stock:
                            product.in_stock = False
                            product.save()
                            # If an item just switched, record it that way
                            results['logging/items out of stock'].append(
                                product.url)
                        else:
                            # If the item previously was out of stock, call it dropped
                            results['logging/items dropped'][
                                'match failed'].append(product.url)

        print "Updates saved"

        spider = FakeSpider("{} Datafeed".format(store.name), url_slug)
        reason = "finished"

        # Save results
        summary_url, log_url = upload_to_s3.S3Logger(results, spider,
                                                     reason).run()

        notify_slack.dump_stats(results, spider, reason,
                                (summary_url, log_url))