def fake_object(self, data):
    """Create a fake instance of Website from ES data.

    `data` is the raw document as stored in ElasticSearch; the returned
    Website instance is only populated from that document, it is not
    loaded from (or saved to) the database.
    """
    obj = Website(id=data['id'])
    # Set basic attributes on the fake instance using the data from ES.
    self._attach_fields(obj, data, ('default_locale', 'icon_hash', 'url'))
    # Set attributes with names that don't exactly match the one on the
    # model.
    obj.categories = data['category']
    obj.devices = data['device']
    if obj.icon_hash:
        # If we have an icon_hash, then we have an icon. All the icons we
        # store are PNGs.
        obj.icon_type = 'image/png'
    # Attach translations for all translated attributes. obj.default_locale
    # should be set first for this to work.
    self._attach_translations(
        obj, data, ('description', 'name', 'short_name', 'title'))
    # Some methods might need the raw data from ES, put it on obj.
    obj.es_data = data
    return obj
def create_instances(self, data):
    """Create Website objects from imported rows in `data`.

    Each row is dict-like, keyed by column name ('Unique Moz ID', 'Rank',
    'Mobile URL', 'TV URL', ...). Existing websites (matched on moz_id)
    are deleted and re-created when self.overwrite is set, skipped
    otherwise. Returns the number of websites created.
    """
    created_count = 0
    for i, row in enumerate(data):
        # Progress indicator, printed every 100 rows.
        if (i + 1) % 100 == 0:
            print 'Processing row %d... (%d websites created)' % (
                i + 1, created_count)
        # self.limit, when truthy, caps how many websites get created.
        if self.limit and created_count >= self.limit:
            print 'Limit (%d) was hit, stopping the import' % self.limit
            break
        id_ = int(self.clean_string(row['Unique Moz ID']))
        rank = int(self.clean_string(row['Rank']))
        try:
            website = Website.objects.get(moz_id=id_)
            if self.overwrite:
                # Existing website and we were asked to overwrite: delete
                # it!
                website.delete()
            else:
                # Existing website and we were not asked to overwrite: skip
                # it, storing its ranking first to set popularity later.
                if self.set_popularity:
                    self.remember_website_ranking(website, rank)
                continue
        except Website.DoesNotExist:
            pass
        # Create the new website inside a transaction so a ParsingError
        # leaves no partial row behind.
        with atomic():
            try:
                devices = []
                if row['Mobile URL']:
                    devices += [DEVICE_GAIA.id, DEVICE_MOBILE.id,
                                DEVICE_TABLET.id]
                if row['TV URL']:
                    devices.append(DEVICE_TV.id)
                website = Website(moz_id=id_, status=STATUS_PUBLIC,
                                  devices=devices)
                self.set_default_locale(website, row)
                self.set_automatic_properties(website, row)
                self.set_categories(website, row)
                self.set_preferred_regions(website, row)
                self.set_url(website, row)
                website.save()
                if self.set_popularity:
                    # Remember ranking to set popularity later.
                    self.remember_website_ranking(website, rank)
                # Keywords use a M2M, so do that once the website is saved.
                self.set_tags(website, row)
                # Launch task to fetch icon once we know everything is OK.
                self.set_icon(website, row)
                created_count += 1
            except ParsingError as e:
                print e.message
    return created_count
def test_get_icon_url_bigger_pk(self):
    """A large pk is split: all but the last 3 digits form the icon dir."""
    site = Website(pk=98765432, icon_type="image/png")
    if storage_is_remote():
        # Remote storage builds the URL from the icon path directly.
        path = "%s/%s-%s.png" % (site.get_icon_dir(), site.pk, 32)
        expected = "%s?modified=never" % storage.url(path)
    else:
        expected = static_url("WEBSITE_ICON_URL") % (
            str(site.pk)[:-3], site.pk, 32, "never")
    actual = site.get_icon_url(32)
    assert actual.endswith(expected), "Expected %s, got %s" % (
        expected, site.get_icon_url(32))
def test_get_icon_url_bigger_pk(self):
    """A large pk is split: all but the last 3 digits form the icon dir."""
    site = Website(pk=98765432, icon_type='image/png')
    if storage_is_remote():
        # Remote storage builds the URL from the icon path directly.
        path = '%s/%s-%s.png' % (site.get_icon_dir(), site.pk, 32)
        expected = '%s?modified=never' % public_storage.url(path)
    else:
        expected = static_url('WEBSITE_ICON_URL') % (
            str(site.pk)[:-3], site.pk, 32, 'never')
    actual = site.get_icon_url(32)
    assert actual.endswith(expected), (
        'Expected %s, got %s' % (expected, site.get_icon_url(32)))
def tearDown(self):
    """Delete all webapps/websites and wipe their ES indexes."""
    # Taken from MultiSearchView test.
    for webapp in Webapp.objects.all():
        webapp.delete()
    for website in Website.objects.all():
        website.delete()
    super(TestDailyGamesView, self).tearDown()
    # Unindex everything after super() so the post_request indexing
    # tasks have been processed first.
    Webapp.get_indexer().unindexer(_all=True)
    Website.get_indexer().unindexer(_all=True)
    self.refresh(('webapp', 'website'))
def create_instances(self, data):
    """Create a Website for each game row in `data`.

    Each row is dict-like, keyed by column name ('Name', 'URL',
    'Description', ...). Rows with no name, or whose URL already belongs
    to a Website, are skipped. Returns the number of websites created.
    """
    created_count = 0
    for i, row in enumerate(data):
        name = self.clean_string(row["Name"])
        if not name:
            # No name, nothing to import for this row.
            continue
        try:
            url = self.clean_string(row["URL"])
            website = Website.objects.get(url=url)
            print "Game with URL %s already exists. Continuing." % url
            continue
        except Website.DoesNotExist:
            pass
        # Create the website inside a transaction so a ParsingError
        # leaves no partial row behind.
        with atomic():
            try:
                website = Website(
                    categories=["games"],
                    devices=[DEVICE_DESKTOP.id],
                    description=self.clean_string(row["Description"]),
                    name=name,
                    status=STATUS_PUBLIC,
                )
                self.set_url(website, row)
                website.save()
                # Keywords use a M2M, so do that once the website is saved.
                self.set_tags(website, row)
                # Launch task to fetch imgs once we know everything is OK.
                try:
                    self.set_icon(website, row)
                    self.set_promo_imgs(website, row)
                    WebsiteIndexer.index_ids([website.id], no_delay=True)
                except Exception as e:
                    # Image fetching / indexing failed: undo this website
                    # (delete + refresh the index) and re-raise.
                    print e
                    WebsiteIndexer.refresh_index()
                    website.delete()
                    raise e
                created_count += 1
            except ParsingError as e:
                print e.message
    return created_count
def fake_object(self, data):
    """Create a fake instance of Website from ES data.

    `data` is the raw document as stored in ElasticSearch; the returned
    Website instance is only populated from that document, it is not
    loaded from (or saved to) the database.
    """
    obj = Website(id=data['id'])
    # Set basic attributes on the fake instance using the data from ES.
    self._attach_fields(obj, data, ('default_locale', 'icon_hash',
                                    'mobile_url', 'promo_img_hash', 'url'))
    # Set attributes with names that don't exactly match the one on the
    # model.
    obj.categories = data['category']
    obj.devices = data['device']
    obj.keywords_list = data['tags']
    if obj.icon_hash:
        # If we have an icon_hash, then we have an icon. All the icons we
        # store are PNGs.
        obj.icon_type = 'image/png'
    # Attach translations for all translated attributes. obj.default_locale
    # should be set first for this to work.
    self._attach_translations(
        obj, data, ('description', 'name', 'short_name', 'title'))
    # Some methods might need the raw data from ES, put it on obj.
    obj.es_data = data
    return obj
def tearDown(self):
    """Delete every Webapp, Website and Extension, then wipe the indexes."""
    for webapp in Webapp.objects.all():
        webapp.delete()
    for website in Website.objects.all():
        website.delete()
    for extension in Extension.objects.all():
        extension.delete()
    super(TestMultiSearchView, self).tearDown()
    # Unindex *everything*: stray deleted content left in the index can
    # still affect relevancy scoring, so we can't just ignore it. This
    # must run after super() has been called, since super() processes the
    # indexing tasks that happen post_request and we need ES to be fully
    # done before continuing.
    Webapp.get_indexer().unindexer(_all=True)
    Website.get_indexer().unindexer(_all=True)
    Extension.get_indexer().unindexer(_all=True)
    self.refresh(('webapp', 'website', 'extension'))
def tearDown(self):
    """Delete all test content, then wipe every related ES index."""
    for webapp in Webapp.objects.all():
        webapp.delete()
    for website in Website.objects.all():
        website.delete()
    for extension in Extension.objects.all():
        extension.delete()
    super(TestMultiSearchView, self).tearDown()
    # Unindex *everything*: stray deleted content left in the index can
    # still affect relevancy scoring, so we can't just ignore it. This
    # must run after super() has been called, since super() processes the
    # indexing tasks that happen post_request and we need ES to be fully
    # done before continuing.
    Webapp.get_indexer().unindexer(_all=True)
    Website.get_indexer().unindexer(_all=True)
    Extension.get_indexer().unindexer(_all=True)
    HomescreenIndexer.unindexer(_all=True)
    self.refresh(('webapp', 'website', 'extension', 'homescreen'))
def fake_object(self, data):
    """Create a fake instance of Website from ES data.

    `data` is the raw document as stored in ElasticSearch; the returned
    Website instance is only populated from that document, it is not
    loaded from (or saved to) the database.
    """
    obj = Website(id=data['id'])
    # Set base attributes on the fake instance using the data from ES.
    self._attach_fields(obj, data, ('default_locale',))
    # Set attributes with names that don't exactly match the one on the
    # model.
    obj.categories = data['category']
    obj.devices = data['device']
    # Attach translations for all translated attributes. obj.default_locale
    # should be set first for this to work. Note that here 'url' is a
    # translated field as well.
    self._attach_translations(
        obj, data, ('url', 'description', 'short_title', 'title'))
    # Some methods might need the raw data from ES, put it on obj.
    obj.es_data = data
    return obj
def test_get_promo_img_url(self):
    """Promo img URL is empty without a hash, cache-busted with one."""
    site = Website(pk=337141)
    # No promo_img_hash set yet: the URL helper returns an empty string.
    eq_(site.get_promo_img_url('640'), '')
    eq_(site.get_promo_img_url('1050'), '')
    site.promo_img_hash = 'chicken'
    ok_('website_promo_imgs/337/337141-640.png?modified=chicken'
        in site.get_promo_img_url('640'))
    ok_('website_promo_imgs/337/337141-1050.png?modified=chicken'
        in site.get_promo_img_url('1050'))
def fake_object(self, data):
    """Create a fake instance of Website from ES data.

    `data` is the raw document as stored in ElasticSearch; the returned
    Website instance is only populated from that document, it is not
    loaded from (or saved to) the database.
    """
    obj = Website(id=data["id"])
    # Set basic attributes on the fake instance using the data from ES.
    self._attach_fields(obj, data, ("default_locale", "icon_hash",
                                    "mobile_url", "promo_img_hash",
                                    "tv_url", "url"))
    # Set attributes with names that don't exactly match the one on the
    # model.
    obj.categories = data["category"]
    obj.devices = data["device"]
    obj.keywords_list = data["tags"]
    if obj.icon_hash:
        # If we have an icon_hash, then we have an icon. All the icons we
        # store are PNGs.
        obj.icon_type = "image/png"
    # Attach translations for all translated attributes. obj.default_locale
    # should be set first for this to work.
    self._attach_translations(obj, data, ("description", "name",
                                          "short_name", "title"))
    # Some methods might need the raw data from ES, put it on obj.
    obj.es_data = data
    return obj
def test_get_promo_img_url(self):
    """Promo img URL is empty without a hash, cache-busted with one."""
    site = Website(pk=337141)
    # No promo_img_hash set yet: the URL helper returns an empty string.
    eq_(site.get_promo_img_url("640"), "")
    eq_(site.get_promo_img_url("1050"), "")
    site.promo_img_hash = "chicken"
    ok_("website_promo_imgs/337/337141-640.png?modified=chicken"
        in site.get_promo_img_url("640"))
    ok_("website_promo_imgs/337/337141-1050.png?modified=chicken"
        in site.get_promo_img_url("1050"))
def test_q_num_requests(self):
    """A text search should hit ES exactly once and the DB not at all.

    We wrap es.search with a counting proxy, run the query, then check
    the counter. The original method is restored in a finally block so
    that a failing assertion does not leak the monkey-patch into other
    tests sharing this ES client.
    """
    es = Website.get_indexer().get_es()
    orig_search = es.search
    es.counter = 0

    def monkey_search(*args, **kwargs):
        es.counter += 1
        return orig_search(*args, **kwargs)

    es.search = monkey_search
    try:
        with self.assertNumQueries(0):
            res = self.anon.get(self.url, data={'q': 'something'})
            eq_(res.status_code, 200)
            eq_(res.json['meta']['total_count'], 1)
            eq_(len(res.json['objects']), 1)
        # Verify only one search call was made.
        eq_(es.counter, 1)
    finally:
        # Always restore the original method, even when an assertion
        # above fails.
        es.search = orig_search
def create_instances(self, data):
    """Create a Website for each game row in `data`.

    Each row is dict-like, keyed by column name ('Name', 'URL',
    'Description', ...). Rows with no name, or whose URL already belongs
    to a Website, are skipped. Returns the number of websites created.
    """
    created_count = 0
    for i, row in enumerate(data):
        name = self.clean_string(row['Name'])
        if not name:
            # No name, nothing to import for this row.
            continue
        try:
            url = self.clean_string(row['URL'])
            website = Website.objects.get(url=url)
            print 'Game with URL %s already exists. Continuing.' % url
            continue
        except Website.DoesNotExist:
            pass
        # Create the website inside a transaction so a ParsingError
        # leaves no partial row behind.
        with atomic():
            try:
                website = Website(
                    categories=['games'],
                    devices=[DEVICE_DESKTOP.id],
                    description=self.clean_string(row['Description']),
                    name=name,
                    status=STATUS_PUBLIC,
                )
                self.set_url(website, row)
                website.save()
                # Keywords use a M2M, so do that once the website is saved.
                self.set_tags(website, row)
                # Launch task to fetch imgs once we know everything is OK.
                try:
                    self.set_icon(website, row)
                    self.set_promo_imgs(website, row)
                    WebsiteIndexer.index_ids([website.id], no_delay=True)
                except Exception as e:
                    # Image fetching / indexing failed: undo this website
                    # (delete + refresh the index) and re-raise.
                    print e
                    WebsiteIndexer.refresh_index()
                    website.delete()
                    raise e
                created_count += 1
            except ParsingError as e:
                print e.message
    return created_count
def test_get_icon_no_icon_pink(self):
    """With no icon set, pk 164 falls back to the pink default icon."""
    site = Website(pk=164)
    icon_url = site.get_icon_url(32)
    assert icon_url.endswith('hub/europe-africa-pink-32.png'), icon_url
def test_get_icon_url_hash(self):
    """The icon_hash is appended as the cache-busting query parameter."""
    site = Website(pk=1, icon_type='image/png', icon_hash='abcdef')
    icon_url = site.get_icon_url(32)
    assert icon_url.endswith('?modified=abcdef')
def test_get_icon_no_icon_blue(self):
    """With no icon set, pk 8 falls back to the blue default icon."""
    site = Website(pk=8)
    icon_url = site.get_icon_url(32)
    assert icon_url.endswith('hub/asia-australia-blue-32.png'), icon_url
def test_get_icon_url_bigger_pk(self):
    """A large pk is split: all but the last 3 digits form the icon dir."""
    site = Website(pk=98765432, icon_type='image/png')
    pk_prefix = str(site.pk)[:-3]
    expected = static_url('WEBSITE_ICON_URL') % (pk_prefix, site.pk, 32,
                                                 'never')
    actual = site.get_icon_url(32)
    assert actual.endswith(expected), (
        'Expected %s, got %s' % (expected, site.get_icon_url(32)))
def setUp(self):
    # get_indexer() returns the indexer class; the second call
    # instantiates it for use by the tests.
    self.indexer = Website.get_indexer()()
def tearDown(self):
    # Remove every Website document from the ES index so later tests
    # start from a clean index.
    Website.get_indexer().unindexer(_all=True)
    super(TestMultiSearchView, self).tearDown()
def test_devices_names(self):
    """device_names maps the stored device ids to their slug names."""
    site = Website(devices=[DEVICE_DESKTOP.id, DEVICE_GAIA.id])
    names = sorted(site.device_names)
    eq_(names, ['desktop', 'firefoxos'])
def tearDown(self):
    # Remove every Website document from the ES index so later tests
    # start from a clean index.
    Website.get_indexer().unindexer(_all=True)
    super(TestWebsiteESView, self).tearDown()
def test_get_icon_no_icon(self):
    """With no icon set, the generic default icon URL is returned."""
    site = Website(pk=1)
    icon_url = site.get_icon_url(32)
    assert icon_url.endswith('/default-32.png')
def tearDown(self):
    # Remove every Website document from the ES index so later tests
    # start from a clean index.
    Website.get_indexer().unindexer(_all=True)
    super(TestReviewerSearch, self).tearDown()
def tearDown(self):
    # Remove every Webapp and Website document from the ES indexes so
    # later tests start from clean indexes.
    Webapp.get_indexer().unindexer(_all=True)
    Website.get_indexer().unindexer(_all=True)
    super(TestMultiSearchView, self).tearDown()
def test_get_preferred_regions(self):
    """get_preferred_regions() returns region objects, reordered."""
    site = Website()
    site.preferred_regions = [URY.id, USA.id]
    slugs = [region.slug for region in site.get_preferred_regions()]
    eq_(slugs, [USA.slug, URY.slug])
def test_get_icon_url_hash(self):
    """The icon_hash is appended as the cache-busting query parameter."""
    site = Website(pk=1, icon_type="image/png", icon_hash="abcdef")
    icon_url = site.get_icon_url(32)
    assert icon_url.endswith("?modified=abcdef")
def test_get_icon_url(self):
    """A small pk (< 1000) uses '0' as its icon directory."""
    site = Website(pk=1, icon_type='image/png')
    expected = static_url('WEBSITE_ICON_URL') % ('0', site.pk, 32, 'never')
    actual = site.get_icon_url(32)
    assert actual.endswith(expected), (
        'Expected %s, got %s' % (expected, site.get_icon_url(32)))
def test_devices(self):
    """The devices field round-trips every known device type id."""
    all_device_ids = [device.id for device in DEVICE_TYPE_LIST]
    site = Website(devices=all_device_ids)
    eq_(sorted(site.devices), sorted(all_device_ids))