def repack_themes_for_69(addon_ids, **kw):
    log.info(
        '[%s@%s] Repacking themes to use 69+ properties starting at id: %s...'
        % (len(addon_ids), recreate_theme_previews.rate_limit, addon_ids[0]))
    addons = Addon.objects.filter(pk__in=addon_ids).no_transforms()
    olympia.core.set_user(UserProfile.objects.get(pk=settings.TASK_USER_ID))
    for addon in addons:
        version = addon.current_version
        log.info('[CHECK] theme [%r] for deprecated properties' % addon)
        if not version:
            log.info('[INVALID] theme [%r] has no current_version' % addon)
            continue
        pause_all_tasks()
        try:
            timer = StopWatch('addons.tasks.repack_themes_for_69')
            timer.start()
            old_xpi = get_filepath(version.all_files[0])
            old_data = parse_addon(old_xpi, minimal=True)
            new_data = new_69_theme_properties_from_old(old_data)
            if new_data != old_data:
                # If the manifest isn't the same, let's repack.
                new_version = new_theme_version_with_69_properties(version)
                log.info('[SUCCESS] Theme [%r], version [%r] updated to [%r]'
                         % (addon, version, new_version))
            else:
                log.info('[SKIP] No need for theme repack [%s]' % addon.id)
            timer.log_interval('')
        except (IOError, ValidationError, JSONDecodeError, SigningError) as ex:
            log.debug('[FAIL] Theme repack for [%r]:', addon, exc_info=ex)
        finally:
            resume_all_tasks()
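# StopWatch is used throughout these snippets but not defined in them.
# A minimal, hypothetical sketch matching the interface used here
# (start() followed by log_interval(label)) might look like this; the real
# helper in the project may differ:
import logging
import time

monitor_log = logging.getLogger('z.timer')


class StopWatch:
    """Hypothetical timing helper: logs elapsed time between intervals
    under a dotted label prefix."""

    def __init__(self, label_prefix=''):
        self.label_prefix = label_prefix

    def start(self):
        self._timestamp = time.monotonic()

    def log_interval(self, label):
        now = time.monotonic()
        monitor_log.info(
            '%s%s (took %.3fs)', self.label_prefix, label,
            now - self._timestamp)
        self._timestamp = now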
def migrate_lwts_to_static_themes(ids, **kw):
    """With the specified ids, create new static themes based on the existing
    lightweight themes (personas), and delete the lightweight themes after."""
    mlog = olympia.core.logger.getLogger('z.task.lwtmigrate')
    mlog.info(
        '[Info] Migrating LWT to static theme %d-%d [%d].',
        ids[0], ids[-1], len(ids))
    # Incoming ids should already be type=persona only.
    lwts = Addon.objects.filter(id__in=ids)
    for lwt in lwts:
        static = None
        pause_all_tasks()
        try:
            timer = StopWatch('addons.tasks.migrate_lwts_to_static_theme')
            timer.start()
            with translation.override(lwt.default_locale):
                static = add_static_theme_from_lwt(lwt)
                mlog.info(
                    '[Success] Static theme %r created from LWT %r',
                    static, lwt)
            if not static:
                raise Exception('add_static_theme_from_lwt returned falsey')
            MigratedLWT.objects.create(
                lightweight_theme=lwt,
                getpersonas_id=lwt.persona.persona_id,
                static_theme=static)
            # Steal the lwt's slug after it's deleted.
            slug = lwt.slug
            lwt.delete(send_delete_email=False)
            static.update(slug=slug)
            timer.log_interval('')
        except Exception as e:
            # If something went wrong, don't migrate - we need to debug.
            mlog.debug('[Fail] LWT %r:', lwt, exc_info=e)
        finally:
            resume_all_tasks()
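# A hedged sketch of how a migration like the one above might be dispatched
# in batches. The batching helper and the .delay() call assume the function
# is registered as a celery task; this is illustrative wiring, not the
# project's actual entry point:
def dispatch_lwt_migration(all_ids, batch_size=100):
    # Send fixed-size batches so each task invocation stays small and can be
    # retried independently.
    for start in range(0, len(all_ids), batch_size):
        batch = all_ids[start:start + batch_size]
        migrate_lwts_to_static_themes.delay(batch)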
class Sitemap(DjangoSitemap):
    limit = 1000
    i18n = True
    languages = FRONTEND_LANGUAGES
    alternates = True
    # x_default = False  # TODO: enable this when we can validate it works well
    _cached_items = []

    def __init__(self, *args, **kwargs):
        self.timer = StopWatch(
            f'amo.sitemap.{self.__class__.__name__}.render_xml')
        super().__init__(*args, **kwargs)

    def _location(self, item, force_lang_code=None):
        if self.i18n:
            obj, lang_code = item
            # Modified from the Django implementation - we don't rely on
            # locale for urls.
            with override_url_prefix(locale=(force_lang_code or lang_code)):
                return self.location(obj)
        return self.location(item)

    def _items(self):
        items = self.items()
        self.timer.log_interval('items')
        if self.i18n:
            # Create (item, lang_code) tuples for all items and languages.
            # This is necessary to paginate with all languages already
            # considered.
            return LazyTupleList(items, self._languages())
        return items

    def items(self):
        return self._cached_items

    def render_xml(self, app_name, page):
        self.timer.start()
        site_url = urlparse(settings.EXTERNAL_SITE_URL)
        # Sitemap.get_urls wants a Site instance to get the domain, so just
        # fake it.
        site = namedtuple('FakeSite', 'domain')(site_url.netloc)
        with override_url_prefix(app_name=app_name):
            self.timer.log_interval('setup-done')
            xml = loader.render_to_string(
                'sitemap.xml',
                {'urlset': self.get_urls(
                    page=page, site=site, protocol=site_url.scheme)},
            )
        self.timer.log_interval('finish')
        return xml

    @property
    def _current_app(self):
        return amo.APPS[get_url_prefix().app]
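# LazyTupleList is used by _items() above but not defined in these snippets.
# Django's paginator only needs __len__ and __getitem__, so a hypothetical
# minimal implementation of the lazy (item, lang_code) cross product could
# look like this; the real class may behave differently:
class LazyTupleList:
    """Hypothetical sketch: acts like
    [(item, lang) for item in items for lang in langs]
    without materialising the full list up front."""

    def __init__(self, items, langs):
        self.items = items
        self.langs = list(langs)

    def __len__(self):
        return len(self.items) * len(self.langs)

    def __getitem__(self, index):
        if isinstance(index, slice):
            return [self[i] for i in range(*index.indices(len(self)))]
        item_index, lang_index = divmod(index, len(self.langs))
        return (self.items[item_index], self.langs[lang_index])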
def new_theme_version_with_69_properties(old_version):
    timer = StopWatch(
        'addons.tasks.repack_themes_for_69.new_theme_version.')
    timer.start()
    author = get_user()
    # Wrap the zip in a FileUpload for Version.from_upload to consume.
    upload = FileUpload.objects.create(user=author, valid=True)
    filename = uuid.uuid4().hex + '.xpi'
    destination = os.path.join(user_media_path('addons'), 'temp', filename)
    old_xpi = get_filepath(old_version.all_files[0])
    build_69_compatible_theme(
        old_xpi, destination, get_next_version_number(old_version.addon))
    upload.update(path=destination, name=filename)
    timer.log_interval('1.build_xpi')

    # Create the new version.
    parsed_data = parse_addon(upload, addon=old_version.addon, user=author)
    timer.log_interval('2.parse_addon')
    version = Version.from_upload(
        upload, old_version.addon, selected_apps=[amo.FIREFOX.id],
        channel=amo.RELEASE_CHANNEL_LISTED, parsed_data=parsed_data)
    timer.log_interval('3.initialize_version')

    # And finally sign the files (actually just one).
    for file_ in version.all_files:
        sign_file(file_)
        file_.update(
            reviewed=datetime.now(), status=amo.STATUS_APPROVED)
    timer.log_interval('4.sign_files')
    return version
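# get_next_version_number() is called above but not shown. A hypothetical
# sketch of the idea (take the highest existing version number for the addon
# and bump its major part) could look like this; the real helper may use
# different rules:
def get_next_version_number(addon):
    # Assumes addon.versions is a related manager exposing version strings
    # like '1.0' or '2.1'; non-numeric parts are treated as 0.
    def as_tuple(version_string):
        return tuple(
            int(part) if part.isdigit() else 0
            for part in version_string.split('.'))

    versions = [v.version for v in addon.versions.all()] or ['0']
    highest = max(versions, key=as_tuple)
    return '%d.0' % (as_tuple(highest)[0] + 1)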
def add_static_theme_from_lwt(lwt):
    from olympia.activity.models import AddonLog

    timer = StopWatch(
        'addons.tasks.migrate_lwts_to_static_theme.add_from_lwt.')
    timer.start()
    olympia.core.set_user(UserProfile.objects.get(pk=settings.TASK_USER_ID))
    # Try to handle LWTs with no authors.
    author = (lwt.listed_authors or [_get_lwt_default_author()])[0]
    # Wrap the zip in a FileUpload for Addon/Version.from_upload to consume.
    upload = FileUpload.objects.create(user=author, valid=True)
    filename = uuid.uuid4().hex + '.xpi'
    destination = os.path.join(user_media_path('addons'), 'temp', filename)
    build_static_theme_xpi_from_lwt(lwt, destination)
    upload.update(path=destination, name=filename)
    timer.log_interval('1.build_xpi')

    # Create the addon + version.
    parsed_data = parse_addon(upload, user=author)
    timer.log_interval('2a.parse_addon')
    addon = Addon.initialize_addon_from_upload(
        parsed_data, upload, amo.RELEASE_CHANNEL_LISTED, author)
    addon_updates = {}
    timer.log_interval('2b.initialize_addon')
    # Static themes are only compatible with Firefox at the moment,
    # not Android.
    version = Version.from_upload(
        upload, addon, selected_apps=[amo.FIREFOX.id],
        channel=amo.RELEASE_CHANNEL_LISTED, parsed_data=parsed_data)
    timer.log_interval('3.initialize_version')

    # Set the category.
    lwt_category = (lwt.categories.all() or [None])[0]  # lwts only have 1.
    lwt_category_slug = lwt_category.slug if lwt_category else 'other'
    for app, type_dict in CATEGORIES.items():
        static_theme_categories = type_dict.get(amo.ADDON_STATICTHEME, {})
        static_category = static_theme_categories.get(
            lwt_category_slug, static_theme_categories.get('other'))
        AddonCategory.objects.create(
            addon=addon,
            category=Category.from_static_category(static_category, True))
    timer.log_interval('4.set_categories')

    # Set the license.
    lwt_license = PERSONA_LICENSES_IDS.get(
        lwt.persona.license, LICENSE_COPYRIGHT_AR)  # Default: full copyright.
    static_license = License.objects.get(builtin=lwt_license.builtin)
    version.update(license=static_license)
    timer.log_interval('5.set_license')

    # Set the tags.
    for addon_tag in AddonTag.objects.filter(addon=lwt):
        AddonTag.objects.create(addon=addon, tag=addon_tag.tag)
    timer.log_interval('6.set_tags')

    # Steal the ratings (even with soft delete they'll be deleted anyway).
    addon_updates.update(
        average_rating=lwt.average_rating,
        bayesian_rating=lwt.bayesian_rating,
        total_ratings=lwt.total_ratings,
        text_ratings_count=lwt.text_ratings_count)
    Rating.unfiltered.filter(addon=lwt).update(addon=addon, version=version)
    timer.log_interval('7.move_ratings')

    # Replace the lwt in collections.
    CollectionAddon.objects.filter(addon=lwt).update(addon=addon)
    # Move the rating activity log entries over too.
    rating_activity_log_ids = [
        log_type.id for log_type in amo.LOG
        if getattr(log_type, 'action_class', '') == 'review']
    addonlog_qs = AddonLog.objects.filter(
        addon=lwt, activity_log__action__in=rating_activity_log_ids)
    for alog in addonlog_qs.iterator():
        alog.transfer(addon)
    timer.log_interval('8.move_activity_logs')

    # Copy the ADU statistics - the raw(ish) daily UpdateCounts for the stats
    # dashboard and future update counts, and copy the average_daily_users.
    # hotness will be recalculated by the deliver_hotness() cron in a more
    # reliable way than we could do here, so skip it entirely.
    migrate_theme_update_count(lwt, addon)
    addon_updates.update(
        average_daily_users=lwt.persona.popularity or 0, hotness=0)
    timer.log_interval('9.copy_statistics')

    # Logging.
    activity.log_create(
        amo.LOG.CREATE_STATICTHEME_FROM_PERSONA, addon, user=author)

    # And finally sign the files (actually just one).
    for file_ in version.all_files:
        sign_file(file_)
        file_.update(
            datestatuschanged=lwt.last_updated,
            reviewed=datetime.now(),
            status=amo.STATUS_APPROVED)
    timer.log_interval('10.sign_files')
    addon_updates['status'] = amo.STATUS_APPROVED

    # Set the modified and creation dates to match the original.
    addon_updates['created'] = lwt.created
    addon_updates['modified'] = lwt.modified
    addon_updates['last_updated'] = lwt.last_updated

    addon.update(**addon_updates)
    return addon
def repack_fileupload(results, upload_pk):
    log.info('Starting task to repackage FileUpload %s', upload_pk)
    upload = FileUpload.objects.get(pk=upload_pk)
    # When a FileUpload is created and a file added to it, if it's an
    # xpi/zip, it should be moved to upload.path, and it should have a .xpi
    # extension, so we only need to care about that extension here.
    # We don't trust upload.name: it's the original filename as used by the
    # developer, so it could be something else.
    if upload.path.endswith('.xpi'):
        timer = StopWatch('files.tasks.repack_fileupload.')
        timer.start()
        try:
            tempdir = tempfile.mkdtemp()  # *Not* on TMP_PATH, we want local fs.
            extract_zip(upload.path, tempdir=tempdir)
        except Exception as exc:
            # Something bad happened, maybe we couldn't parse the zip file.
            # @validation_task should ensure the exception is caught and
            # transformed into a generic error message for the developer, so
            # we just log it and re-raise.
            log.exception('Could not extract upload %s for repack.',
                          upload_pk, exc_info=exc)
            raise
        timer.log_interval('1.extracted')
        log.info('Zip from upload %s extracted, repackaging', upload_pk)
        file_ = tempfile.NamedTemporaryFile(suffix='.zip', delete=False)
        shutil.make_archive(os.path.splitext(file_.name)[0], 'zip', tempdir)
        with open(file_.name, 'rb') as f:
            upload.hash = 'sha256:%s' % get_sha256(f)
        timer.log_interval('2.repackaged')
        log.info('Zip from upload %s repackaged, moving file back', upload_pk)
        move_stored_file(file_.name, upload.path)
        timer.log_interval('3.moved')
        upload.save()
        timer.log_interval('4.end')
    else:
        log.info('Not repackaging upload %s, it is not a xpi file.',
                 upload_pk)
    return results
def repack_fileupload(results, upload_pk):
    log.info('Starting task to repackage FileUpload %s', upload_pk)
    upload = FileUpload.objects.get(pk=upload_pk)
    # When a FileUpload is created and a file added to it, if it's an
    # xpi/zip, it should be moved to upload.path, and it should have a .zip
    # extension, so we only need to care about that extension here.
    # We don't trust upload.name: it's the original filename as used by the
    # developer, so it could be something else.
    if upload.path.endswith('.zip'):
        timer = StopWatch('files.tasks.repack_fileupload.')
        timer.start()
        # tempdir must *not* be on TMP_PATH, we want local fs instead. It
        # will be deleted automatically once we exit the context manager.
        with tempfile.TemporaryDirectory(
                prefix='repack_fileupload_extract') as tempdir:
            try:
                extract_zip(upload.path, tempdir=tempdir)

                if waffle.switch_is_active('enable-manifest-normalization'):
                    manifest = Path(tempdir) / 'manifest.json'

                    if manifest.exists():
                        try:
                            xpi_data = parse_xpi(upload.path, minimal=True)

                            if not xpi_data.get(
                                    'is_mozilla_signed_extension', False):
                                json_data = ManifestJSONExtractor(
                                    manifest.read_bytes()).data
                                manifest.write_text(
                                    json.dumps(json_data, indent=2))
                        except Exception:
                            # If we cannot normalize the manifest file, we
                            # skip this step and let the linter catch the
                            # exact cause in order to return a more
                            # appropriate error than "unexpected error",
                            # which would happen if this task was handling
                            # the error itself.
                            pass
            except Exception as exc:
                # Something bad happened, maybe we couldn't parse the zip
                # file. @validation_task should ensure the exception is
                # caught and transformed into a generic error message for
                # the developer, so we just log it and re-raise.
                log.exception('Could not extract upload %s for repack.',
                              upload_pk, exc_info=exc)
                raise
            timer.log_interval('1.extracted')
            log.info('Zip from upload %s extracted, repackaging', upload_pk)
            # We'll move the file to its final location below with
            # move_stored_file(), so don't let tempfile delete it.
            file_ = tempfile.NamedTemporaryFile(
                dir=settings.TMP_PATH, suffix='.zip', delete=False)
            shutil.make_archive(
                os.path.splitext(file_.name)[0], 'zip', tempdir)
            with open(file_.name, 'rb') as f:
                upload.hash = 'sha256:%s' % get_sha256(f)
        timer.log_interval('2.repackaged')
        log.info('Zip from upload %s repackaged, moving file back', upload_pk)
        storage.move_stored_file(file_.name, upload.path)
        timer.log_interval('3.moved')
        upload.save()
        timer.log_interval('4.end')
    else:
        log.info('Not repackaging upload %s, it is not a zip file.',
                 upload_pk)
    return results
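# repack_fileupload takes a `results` argument and returns it untouched, so
# it can sit in the middle of a task chain where each task passes the
# accumulated validation results along. A hedged, illustrative sketch of
# such a chain - the surrounding task names are assumptions, not the
# project's real pipeline, and it assumes repack_fileupload is registered
# as a celery task:
from celery import chain


def run_upload_pipeline(upload_pk):
    return chain(
        collect_initial_results.si(upload_pk),   # hypothetical first task
        repack_fileupload.s(upload_pk),          # receives and returns results
        handle_validation_results.s(upload_pk),  # hypothetical final task
    ).delay()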
def update(self, request, *args, **kwargs):
    partial = kwargs.pop('partial', False)
    instance = self.get_object()
    addon = self.get_addon_object()
    has_source = request.data.get('source')
    if has_source:
        timer = StopWatch('addons.views.AddonVersionViewSet.update.')
        timer.start()
        log.info(
            'update, source upload received, addon.slug: %s, version.id: %s',
            addon.slug,
            instance.id,
        )
        timer.log_interval('1.source_received')

    serializer = self.get_serializer(
        instance, data=request.data, partial=partial)
    if has_source:
        log.info(
            'update, serializer loaded, addon.slug: %s, version.id: %s',
            addon.slug,
            instance.id,
        )
        timer.log_interval('2.serializer_loaded')

    serializer.is_valid(raise_exception=True)
    if has_source:
        log.info(
            'update, serializer validated, addon.slug: %s, version.id: %s',
            addon.slug,
            instance.id,
        )
        timer.log_interval('3.serializer_validated')

    self.perform_update(serializer)
    if has_source:
        log.info(
            'update, data saved, addon.slug: %s, version.id: %s',
            addon.slug,
            instance.id,
        )
        timer.log_interval('4.data_saved')

    if getattr(instance, '_prefetched_objects_cache', None):
        # If 'prefetch_related' has been applied to a queryset, we need to
        # forcibly invalidate the prefetch cache on the instance.
        instance._prefetched_objects_cache = {}

    return Response(serializer.data)
def create(self, request, *args, **kwargs):
    addon = self.get_addon_object()
    has_source = request.data.get('source')
    if has_source:
        timer = StopWatch('addons.views.AddonVersionViewSet.create.')
        timer.start()
        log.info(
            'create, source upload received, addon.slug: %s',
            addon.slug,
        )
        timer.log_interval('1.source_received')

    serializer = self.get_serializer(data=request.data)
    if has_source:
        log.info(
            'create, serializer loaded, addon.slug: %s',
            addon.slug,
        )
        timer.log_interval('2.serializer_loaded')

    serializer.is_valid(raise_exception=True)
    if has_source:
        log.info(
            'create, serializer validated, addon.slug: %s',
            addon.slug,
        )
        timer.log_interval('3.serializer_validated')

    self.perform_create(serializer)
    if has_source:
        log.info(
            'create, data saved, addon.slug: %s',
            addon.slug,
        )
        timer.log_interval('4.data_saved')

    headers = self.get_success_headers(serializer.data)
    return Response(
        serializer.data, status=status.HTTP_201_CREATED, headers=headers)
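# A hedged client-side sketch of exercising the `source` branch in the two
# viewset methods above: PATCHing a source archive to a version endpoint.
# The URL layout, auth scheme and field name are illustrative assumptions
# about the API shape, not documented behaviour:
import requests


def upload_version_source(api_root, addon_slug, version_id, source_path,
                          session_token):
    url = f'{api_root}/addons/addon/{addon_slug}/versions/{version_id}/'
    with open(source_path, 'rb') as source_file:
        response = requests.patch(
            url,
            headers={'Authorization': f'Session {session_token}'},
            files={'source': source_file},
        )
    response.raise_for_status()
    return response.json()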