def clean(self):
    self.check_throttles(self.request)

    if not self.errors:
        self._clean_upload()
        parsed_data = parse_addon(
            self.cleaned_data['upload'], self.addon, user=self.request.user)

        if self.addon:
            # Make sure we don't already have this version.
            existing_versions = Version.unfiltered.filter(
                addon=self.addon, version=parsed_data['version'])
            if existing_versions.exists():
                version = existing_versions[0]
                if version.deleted:
                    msg = ugettext(
                        u'Version {version} was uploaded before and '
                        u'deleted.')
                elif version.unreviewed_files:
                    next_url = reverse('devhub.submit.version.details',
                                       args=[self.addon.slug, version.pk])
                    msg = DoubleSafe('%s <a href="%s">%s</a>' % (
                        ugettext(u'Version {version} already exists.'),
                        next_url,
                        ugettext(u'Continue with existing upload instead?')))
                else:
                    msg = ugettext(u'Version {version} already exists.')
                raise forms.ValidationError(
                    msg.format(version=parsed_data['version']))
        self.cleaned_data['parsed_data'] = parsed_data
    return self.cleaned_data
def test_trusted_public_to_public(self):
    upload = self.upload('extension')
    d = parse_addon(upload.path)
    self.addon.update(status=amo.STATUS_PUBLIC, trusted=True)
    eq_(self.addon.status, amo.STATUS_PUBLIC)
    f = File.from_upload(upload, self.version, self.platform, parse_data=d)
    eq_(f.status, amo.STATUS_PUBLIC)
def test_search_extension(self):
    upload = self.upload('search.xml')
    data = parse_addon(upload.path)
    file_ = File.from_upload(upload, self.version, self.platform,
                             parsed_data=data)
    assert file_.filename.endswith('.xml')
    assert file_.no_restart
def create_version_for_upload(addon, upload, channel):
    """Note this function is only used for API uploads."""
    fileupload_exists = addon.fileupload_set.filter(
        created__gt=upload.created, version=upload.version).exists()
    version_exists = Version.unfiltered.filter(
        addon=addon, version=upload.version).exists()
    if (fileupload_exists or version_exists):
        log.info('Skipping Version creation for {upload_uuid} that would '
                 ' cause duplicate version'.format(upload_uuid=upload.uuid))
    else:
        # Import loop.
        from olympia.devhub.utils import add_dynamic_theme_tag
        from olympia.devhub.views import auto_sign_version

        log.info('Creating version for {upload_uuid} that passed '
                 'validation'.format(upload_uuid=upload.uuid))
        # Note: if we somehow managed to get here with an invalid add-on,
        # parse_addon() will raise ValidationError and the task will fail
        # loudly in sentry.
        parsed_data = parse_addon(upload, addon, user=upload.user)
        version = Version.from_upload(
            upload, addon, [x[0] for x in amo.APPS_CHOICES], channel,
            parsed_data=parsed_data)
        # The add-on's status will be STATUS_NULL when its first version is
        # created because the version has no files when it gets added and it
        # gets flagged as invalid. We need to manually set the status.
        if (addon.status == amo.STATUS_NULL and
                channel == amo.RELEASE_CHANNEL_LISTED):
            addon.update(status=amo.STATUS_NOMINATED)
        auto_sign_version(version)
        add_dynamic_theme_tag(version)
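# --- Usage sketch (added for illustration, not part of the original module).
# create_version_for_upload() above is only called for API uploads once their
# validation has finished. A hypothetical caller could look like the helper
# below; `upload.valid` and `upload.addon` follow the FileUpload fields used
# elsewhere in these snippets, and the guard simply defers to the duplicate
# checks performed inside the function itself.
def submit_validated_api_upload(upload, channel=amo.RELEASE_CHANNEL_LISTED):
    """Hypothetical helper: hand a validated API upload over to
    create_version_for_upload()."""
    if not upload.valid:
        # Validation has not passed (or has not run yet); nothing to do.
        return
    create_version_for_upload(upload.addon, upload, channel)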
def test_public_to_unreviewed(self):
    upload = self.upload('extension')
    d = parse_addon(upload.path)
    self.addon.update(status=amo.STATUS_PUBLIC)
    eq_(self.addon.status, amo.STATUS_PUBLIC)
    f = File.from_upload(upload, self.version, self.platform, parse_data=d)
    eq_(f.status, amo.STATUS_UNREVIEWED)
def test_version_number(self):
    parsed_data = parse_addon(self.upload, self.addon, user=mock.Mock())
    version = Version.from_upload(
        self.upload, self.addon, [self.selected_app],
        amo.RELEASE_CHANNEL_LISTED, parsed_data=parsed_data)
    assert version.version == self.now
def test_rdf_parse_errors_are_ignored(self, run_validator, flag_is_active):
    run_validator.return_value = json.dumps({
        "errors": 0,
        "success": True,
        "warnings": 0,
        "notices": 0,
        "message_tree": {},
        "messages": [],
        "metadata": {}
    })
    flag_is_active.return_value = True
    addon = Addon.objects.get(pk=3615)
    xpi = self.get_upload('extension.xpi')
    d = parse_addon(xpi.path)
    # Set up a duplicate upload:
    addon.update(guid=d['guid'])
    res = self.client.get(reverse('devhub.validate_addon'))
    doc = pq(res.content)
    upload_url = doc('#upload-addon').attr('data-upload-url')
    with storage.open(xpi.path, 'rb') as f:
        # Simulate JS file upload
        res = self.client.post(upload_url, {'upload': f}, follow=True)
    data = json.loads(res.content)
    # Simulate JS result polling:
    res = self.client.get(data['url'])
    data = json.loads(res.content)
    # Make sure we don't see a dupe UUID error:
    assert data['validation']['messages'] == []
    # Simulate JS result polling on detail page:
    res = self.client.get(data['full_report_url'], follow=True)
    res = self.client.get(res.context['validate_url'], follow=True)
    data = json.loads(res.content)
    # Again, make sure we don't see a dupe UUID error:
    assert data['validation']['messages'] == []
def test_file_not_multi_package(self):
    parsed_data = parse_addon(self.upload, self.addon, user=mock.Mock())
    version = Version.from_upload(self.upload, self.addon, [self.platform],
                                  amo.RELEASE_CHANNEL_LISTED,
                                  parsed_data=parsed_data)
    files = version.all_files
    assert not files[0].is_multi_package
def test_file_name(self):
    parsed_data = parse_addon(self.upload, self.addon, user=mock.Mock())
    version = Version.from_upload(self.upload, self.addon, [self.platform],
                                  amo.RELEASE_CHANNEL_LISTED,
                                  parsed_data=parsed_data)
    files = version.all_files
    assert files[0].filename == u'delicious_bookmarks-0.1-fx-mac.xpi'
def test_extract(self):
    upload = self.get_upload('webextension_no_id.xpi')
    parsed_data = parse_addon(upload, user=mock.Mock())
    # Remove the permissions from the parsed data so they aren't added.
    pdata_permissions = parsed_data.pop('permissions')
    pdata_cscript = parsed_data.pop('content_scripts')
    file_ = File.from_upload(upload, self.version, self.platform,
                             parsed_data=parsed_data)
    assert WebextPermission.objects.count() == 0
    assert file_.webext_permissions_list == []

    call_command('extract_permissions')

    file_ = File.objects.get(id=file_.id)
    assert WebextPermission.objects.get(file=file_)
    permissions_list = file_.webext_permissions_list
    assert len(permissions_list) == 8
    assert permissions_list == [
        # first 5 are 'permissions'
        u'http://*/*', u'https://*/*', 'bookmarks', 'made up permission',
        'https://google.com/',
        # last 3 are 'content_scripts' matches we treat the same
        '*://*.mozilla.org/*', '*://*.mozilla.com/*',
        'https://*.mozillians.org/*']
    assert permissions_list[0:5] == pdata_permissions
    assert permissions_list[5:8] == [
        x for y in [cs['matches'] for cs in pdata_cscript] for x in y]
def migrate_legacy_dictionary_to_webextension(addon):
    """Migrate a single legacy dictionary to webextension format, creating a
    new package from the current_version, faking an upload to create a new
    Version instance."""
    user = UserProfile.objects.get(pk=settings.TASK_USER_ID)
    now = datetime.now()

    # Wrap zip in FileUpload for Version.from_upload() to consume.
    upload = FileUpload.objects.create(user=user, valid=True)
    destination = os.path.join(
        user_media_path('addons'), 'temp', uuid.uuid4().hex + '.xpi')
    target_language = build_webext_dictionary_from_legacy(addon, destination)
    if not addon.target_locale:
        addon.update(target_locale=target_language)

    upload.update(path=destination)

    parsed_data = parse_addon(upload, addon=addon, user=user)
    # Create version.
    # WebExtension dictionaries are only compatible with Firefox Desktop
    # Firefox for Android uses the OS spellchecking.
    version = Version.from_upload(
        upload, addon, selected_apps=[amo.FIREFOX.id],
        channel=amo.RELEASE_CHANNEL_LISTED, parsed_data=parsed_data)
    activity.log_create(amo.LOG.ADD_VERSION, version, addon, user=user)

    # Sign the file, and set it to public. That should automatically set
    # current_version to the version we created.
    file_ = version.all_files[0]
    sign_file(file_)
    file_.update(datestatuschanged=now, reviewed=now,
                 status=amo.STATUS_PUBLIC)
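# --- Usage sketch (added for illustration, not part of the original module).
# migrate_legacy_dictionary_to_webextension() above handles one add-on, so a
# migration task or script would typically loop over the legacy dictionaries
# and call it per add-on. The queryset filter below is an assumption for
# illustration only.
for dictionary in Addon.objects.filter(type=amo.ADDON_DICT):
    migrate_legacy_dictionary_to_webextension(dictionary)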
def test_trusted_lite_to_lite(self):
    upload = self.upload('extension')
    d = parse_addon(upload.path)
    self.addon.update(status=amo.STATUS_LITE, trusted=True)
    assert self.addon.status == amo.STATUS_LITE
    f = File.from_upload(upload, self.version, self.platform, parse_data=d)
    assert f.status == amo.STATUS_LITE
def from_upload(cls, upload, addon, platforms, send_signal=True,
                source=None, is_beta=False):
    from olympia.addons.models import AddonFeatureCompatibility

    data = utils.parse_addon(upload, addon)
    try:
        license = addon.versions.latest().license_id
    except Version.DoesNotExist:
        license = None
    v = cls.objects.create(
        addon=addon,
        version=data['version'],
        license_id=license,
        source=source
    )
    log.info('New version: %r (%s) from %r' % (v, v.id, upload))

    # Update the add-on e10s compatibility since we're creating a new
    # version that may change that.
    e10s_compatibility = data.get('e10s_compatibility')
    if e10s_compatibility is not None:
        feature_compatibility = (
            AddonFeatureCompatibility.objects.get_or_create(addon=addon)[0]
        )
        feature_compatibility.update(e10s=e10s_compatibility)

    AV = ApplicationsVersions
    for app in data.get('apps', []):
        AV(version=v, min=app.min, max=app.max, application=app.id).save()
    if addon.type == amo.ADDON_SEARCH:
        # Search extensions are always for all platforms.
        platforms = [amo.PLATFORM_ALL.id]
    else:
        platforms = cls._make_safe_platform_files(platforms)

    for platform in platforms:
        File.from_upload(upload, v, platform, parse_data=data,
                         is_beta=is_beta)

    v.disable_old_files()
    # After the upload has been copied to all platforms, remove the upload.
    storage.delete(upload.path)
    if send_signal:
        version_uploaded.send(sender=v)

    # Track the time it took from first upload through validation
    # (and whatever else) until a version was created.
    upload_start = utc_millesecs_from_epoch(upload.created)
    now = datetime.datetime.now()
    now_ts = utc_millesecs_from_epoch(now)
    upload_time = now_ts - upload_start
    log.info('Time for version {version} creation from upload: {delta}; '
             'created={created}; now={now}'
             .format(delta=upload_time, version=v,
                     created=upload.created, now=now))
    statsd.timing('devhub.version_created_from_upload', upload_time)

    return v
def repack_themes_for_69(addon_ids, **kw):
    log.info(
        '[%s@%s] Repacking themes to use 69+ properties starting at id: %s...'
        % (len(addon_ids), recreate_theme_previews.rate_limit, addon_ids[0]))
    addons = Addon.objects.filter(pk__in=addon_ids).no_transforms()
    olympia.core.set_user(UserProfile.objects.get(pk=settings.TASK_USER_ID))
    for addon in addons:
        version = addon.current_version
        log.info('[CHECK] theme [%r] for deprecated properties' % addon)
        if not version:
            log.info('[INVALID] theme [%r] has no current_version' % addon)
            continue
        pause_all_tasks()
        try:
            timer = StopWatch('addons.tasks.repack_themes_for_69')
            timer.start()
            old_xpi = get_filepath(version.all_files[0])
            old_data = parse_addon(old_xpi, minimal=True)
            new_data = new_69_theme_properties_from_old(old_data)
            if new_data != old_data:
                # if the manifest isn't the same let's repack
                new_version = new_theme_version_with_69_properties(version)
                log.info('[SUCCESS] Theme [%r], version [%r] updated to [%r]'
                         % (addon, version, new_version))
            else:
                log.info('[SKIP] No need for theme repack [%s]' % addon.id)
            timer.log_interval('')
        except (IOError, ValidationError, JSONDecodeError, SigningError) as ex:
            log.debug('[FAIL] Theme repack for [%r]:', addon, exc_info=ex)
        finally:
            resume_all_tasks()
def test_public_to_unreviewed(self):
    upload = self.upload('extension')
    d = parse_addon(upload.path)
    self.addon.update(status=amo.STATUS_PUBLIC)
    assert self.addon.status == amo.STATUS_PUBLIC
    f = File.from_upload(upload, self.version, self.platform, parse_data=d)
    assert f.status == amo.STATUS_AWAITING_REVIEW
def test_lite_to_unreviewed(self):
    upload = self.upload('extension')
    d = parse_addon(upload.path)
    self.addon.update(status=amo.STATUS_LITE)
    assert self.addon.status == amo.STATUS_LITE
    f = File.from_upload(upload, self.version, self.platform, parse_data=d)
    assert f.status == amo.STATUS_UNREVIEWED
def new_theme_version_with_69_properties(old_version):
    timer = StopWatch(
        'addons.tasks.repack_themes_for_69.new_theme_version.')
    timer.start()

    author = get_user()
    # Wrap zip in FileUpload for Version from_upload to consume.
    upload = FileUpload.objects.create(user=author, valid=True)
    filename = uuid.uuid4().hex + '.xpi'
    destination = os.path.join(user_media_path('addons'), 'temp', filename)
    old_xpi = get_filepath(old_version.all_files[0])
    build_69_compatible_theme(
        old_xpi, destination, get_next_version_number(old_version.addon))
    upload.update(path=destination, name=filename)
    timer.log_interval('1.build_xpi')

    # Create addon + version
    parsed_data = parse_addon(upload, addon=old_version.addon, user=author)
    timer.log_interval('2.parse_addon')
    version = Version.from_upload(
        upload, old_version.addon, selected_apps=[amo.FIREFOX.id],
        channel=amo.RELEASE_CHANNEL_LISTED, parsed_data=parsed_data)
    timer.log_interval('3.initialize_version')

    # And finally sign the files (actually just one)
    for file_ in version.all_files:
        sign_file(file_)
        file_.update(
            reviewed=datetime.now(),
            status=amo.STATUS_APPROVED)
    timer.log_interval('4.sign_files')
    return version
def handle_upload(self, request, addon, version_string): if "upload" in request.FILES: filedata = request.FILES["upload"] else: raise forms.ValidationError(_(u'Missing "upload" key in multipart file data.'), status.HTTP_400_BAD_REQUEST) # Parse the file to get and validate package data with the addon. pkg = parse_addon(filedata, addon) if not acl.submission_allowed(request.user, pkg): raise forms.ValidationError(_(u"You cannot submit this type of add-on"), status.HTTP_400_BAD_REQUEST) version_string = version_string or pkg["version"] if version_string and pkg["version"] != version_string: raise forms.ValidationError(_("Version does not match the manifest file."), status.HTTP_400_BAD_REQUEST) if addon is not None and addon.versions.filter(version=version_string).exists(): raise forms.ValidationError(_("Version already exists."), status.HTTP_409_CONFLICT) dont_allow_no_guid = not addon and not pkg.get("guid", None) and not pkg.get("is_webextension", False) if dont_allow_no_guid: raise forms.ValidationError( _("Only WebExtensions are allowed to omit the GUID"), status.HTTP_400_BAD_REQUEST ) if addon is None: addon = Addon.create_addon_from_upload_data(data=pkg, user=request.user, upload=filedata, is_listed=False) created = True else: created = False file_upload = handle_upload(filedata=filedata, user=request.user, addon=addon, submit=True) return file_upload, created
def handle_upload(self, request, addon, version_string): if 'upload' in request.FILES: filedata = request.FILES['upload'] else: raise forms.ValidationError( _(u'Missing "upload" key in multipart file data.'), status.HTTP_400_BAD_REQUEST) # Parse the file to get and validate package data with the addon. pkg = parse_addon(filedata, addon) if not acl.submission_allowed(request.user, pkg): raise forms.ValidationError( _(u'You cannot submit this type of add-on'), status.HTTP_400_BAD_REQUEST) version_string = version_string or pkg['version'] if version_string and pkg['version'] != version_string: raise forms.ValidationError( _('Version does not match the manifest file.'), status.HTTP_400_BAD_REQUEST) if (addon is not None and addon.versions.filter(version=version_string).exists()): raise forms.ValidationError( _('Version already exists.'), status.HTTP_409_CONFLICT) dont_allow_no_guid = ( not addon and not pkg.get('guid', None) and not pkg.get('is_webextension', False)) if dont_allow_no_guid: raise forms.ValidationError( _('Only WebExtensions are allowed to omit the GUID'), status.HTTP_400_BAD_REQUEST) if addon is None: addon = Addon.create_addon_from_upload_data( data=pkg, user=request.user, upload=filedata, is_listed=False) created = True channel = amo.RELEASE_CHANNEL_UNLISTED else: created = False last_version = addon.find_latest_version_including_rejected() if last_version: channel = last_version.channel else: # TODO: we need to properly handle channels here and fail if # no previous version to guess with. Also need to allow the # channel to be selected for versions. channel = (amo.RELEASE_CHANNEL_LISTED if addon.is_listed else amo.RELEASE_CHANNEL_UNLISTED) file_upload = handle_upload( filedata=filedata, user=request.user, addon=addon, submit=True, channel=channel) return file_upload, created
def clean(self):
    if not self.errors:
        self._clean_upload()
        xpi = parse_addon(self.cleaned_data['upload'])
        # We don't enforce name uniqueness for unlisted add-ons.
        if not self.cleaned_data.get('is_unlisted', False):
            clean_addon_name(xpi['name'], addon_type=xpi['type'])
    return self.cleaned_data
def test_file_platform_is_always_all(self):
    parsed_data = parse_addon(self.upload, self.addon, user=mock.Mock())
    version = Version.from_upload(self.upload, self.addon, [self.platform],
                                  amo.RELEASE_CHANNEL_LISTED,
                                  parsed_data=parsed_data)
    files = version.all_files
    assert len(files) == 1
    assert files[0].platform == amo.PLATFORM_ALL.id
def test_public_to_beta(self):
    upload = self.upload('beta-extension')
    d = parse_addon(upload.path)
    self.addon.update(status=amo.STATUS_PUBLIC)
    assert self.addon.status == amo.STATUS_PUBLIC
    f = File.from_upload(upload, self.version, self.platform, is_beta=True,
                         parse_data=d)
    assert f.status == amo.STATUS_BETA
def test_beta_version_non_public(self):
    # Only public add-ons can get beta versions.
    upload = self.upload('beta-extension')
    d = parse_addon(upload.path)
    self.addon.update(status=amo.STATUS_LITE)
    assert self.addon.status == amo.STATUS_LITE
    f = File.from_upload(upload, self.version, self.platform, parse_data=d)
    assert f.status == amo.STATUS_UNREVIEWED
def test_file_multi_package(self):
    self.upload = self.get_upload('multi-package.xpi')
    parsed_data = parse_addon(self.upload, self.addon, user=mock.Mock())
    version = Version.from_upload(
        self.upload, self.addon, [self.selected_app],
        amo.RELEASE_CHANNEL_LISTED, parsed_data=parsed_data)
    files = version.all_files
    assert files[0].is_multi_package
def test_android_creates_platform_files(self):
    parsed_data = parse_addon(self.upload, self.addon, user=mock.Mock())
    version = Version.from_upload(self.upload, self.addon,
                                  [amo.PLATFORM_ANDROID.id],
                                  amo.RELEASE_CHANNEL_LISTED,
                                  parsed_data=parsed_data)
    files = version.all_files
    assert sorted(amo.PLATFORMS[f.platform].shortname for f in files) == (
        ['android'])
def test_litenominated_to_unreviewed(self):
    upload = self.upload('extension')
    d = parse_addon(upload.path)
    with mock.patch('olympia.addons.models.Addon.update_status'):
        # mock update_status because it doesn't like Addons without files.
        self.addon.update(status=amo.STATUS_LITE_AND_NOMINATED)
        assert self.addon.status == amo.STATUS_LITE_AND_NOMINATED
    f = File.from_upload(upload, self.version, self.platform, parse_data=d)
    assert f.status == amo.STATUS_UNREVIEWED
def test_app_versions(self):
    parsed_data = parse_addon(self.upload, self.addon, user=mock.Mock())
    version = Version.from_upload(self.upload, self.addon, [self.platform],
                                  amo.RELEASE_CHANNEL_LISTED,
                                  parsed_data=parsed_data)
    assert amo.FIREFOX in version.compatible_apps
    app = version.compatible_apps[amo.FIREFOX]
    assert app.min.version == '3.0'
    assert app.max.version == '3.6.*'
def test_new_version_while_public(
        self, generate_static_theme_preview_mock):
    self.addon = addon_factory(type=amo.ADDON_STATICTHEME)
    parsed_data = parse_addon(self.upload, self.addon, user=mock.Mock())
    version = Version.from_upload(
        self.upload, self.addon, [], amo.RELEASE_CHANNEL_LISTED,
        parsed_data=parsed_data)
    assert len(version.all_files) == 1
    assert generate_static_theme_preview_mock.call_count == 1
def test_file_name(self):
    parsed_data = parse_addon(self.upload, self.addon, user=mock.Mock())
    version = Version.from_upload(
        self.upload, self.addon, [self.selected_app],
        amo.RELEASE_CHANNEL_LISTED, parsed_data=parsed_data)
    files = version.all_files
    assert files[0].filename == (
        u'delicious_bookmarks-%s.xml' % self.now)
def clean(self):
    if not self.errors:
        self._clean_upload()
        xpi = parse_addon(self.cleaned_data['upload'], self.addon)
        # Make sure we don't already have the same non-rejected version.
        if self.addon.versions.filter(version=xpi['version']).exclude(
                files__status=amo.STATUS_DISABLED):
            raise forms.ValidationError(
                _(u'Version %s already exists') % xpi['version'])
    return self.cleaned_data
def test_new_version_is_10s_compatible_no_feature_compat_previously(self):
    assert not self.addon.feature_compatibility.pk
    self.upload = self.get_upload('multiprocess_compatible_extension.xpi')
    parsed_data = parse_addon(self.upload, self.addon, user=mock.Mock())
    version = Version.from_upload(self.upload, self.addon, [self.platform],
                                  amo.RELEASE_CHANNEL_LISTED,
                                  parsed_data=parsed_data)
    assert version.pk
    assert self.addon.feature_compatibility.pk
    assert self.addon.feature_compatibility.e10s == amo.E10S_COMPATIBLE
def test_new_version_while_public(self,
                                  generate_static_theme_preview_mock):
    self.addon = addon_factory(type=amo.ADDON_STATICTHEME)
    parsed_data = parse_addon(self.upload, self.addon, user=mock.Mock())
    version = Version.from_upload(self.upload, self.addon, [],
                                  amo.RELEASE_CHANNEL_LISTED,
                                  parsed_data=parsed_data)
    assert len(version.all_files) == 1
    assert generate_static_theme_preview_mock.call_count == 1
def test_app_versions(self):
    parsed_data = parse_addon(self.upload, self.addon, user=mock.Mock())
    version = Version.from_upload(self.upload, self.addon,
                                  [self.selected_app],
                                  amo.RELEASE_CHANNEL_LISTED,
                                  parsed_data=parsed_data)
    assert amo.FIREFOX in version.compatible_apps
    app = version.compatible_apps[amo.FIREFOX]
    assert app.min.version == '3.0'
    assert app.max.version == '3.6.*'
def test_creates_platform_files(self):
    # We are creating files for 'all' platforms every time, #8752
    parsed_data = parse_addon(self.upload, self.addon, user=mock.Mock())
    version = Version.from_upload(self.upload, self.addon,
                                  [self.selected_app],
                                  amo.RELEASE_CHANNEL_LISTED,
                                  parsed_data=parsed_data)
    files = version.all_files
    assert sorted(amo.PLATFORMS[f.platform].shortname for f in files) == (
        ['all'])
def from_upload(cls, upload, addon, platforms, send_signal=True,
                source=None, is_beta=False):
    data = utils.parse_addon(upload, addon)
    try:
        license = addon.versions.latest().license_id
    except Version.DoesNotExist:
        license = None
    v = cls.objects.create(addon=addon, version=data['version'],
                           license_id=license, source=source)
    log.info('New version: %r (%s) from %r' % (v, v.id, upload))

    AV = ApplicationsVersions
    for app in data.get('apps', []):
        AV(version=v, min=app.min, max=app.max, application=app.id).save()
    if addon.type == amo.ADDON_SEARCH:
        # Search extensions are always for all platforms.
        platforms = [amo.PLATFORM_ALL.id]
    else:
        platforms = cls._make_safe_platform_files(platforms)

    for platform in platforms:
        File.from_upload(upload, v, platform, parse_data=data,
                         is_beta=is_beta)

    v.disable_old_files()
    # After the upload has been copied to all platforms, remove the upload.
    storage.delete(upload.path)
    if send_signal:
        version_uploaded.send(sender=v)

    # Track the time it took from first upload through validation
    # (and whatever else) until a version was created.
    upload_start = utc_millesecs_from_epoch(upload.created)
    now = datetime.datetime.now()
    now_ts = utc_millesecs_from_epoch(now)
    upload_time = now_ts - upload_start
    log.info('Time for version {version} creation from upload: {delta}; '
             'created={created}; now={now}'.format(delta=upload_time,
                                                   version=v,
                                                   created=upload.created,
                                                   now=now))
    statsd.timing('devhub.version_created_from_upload', upload_time)

    return v
def test_trusted_public_to_beta(self):
    upload = self.upload('beta-extension')
    d = parse_addon(upload.path)
    self.addon.update(status=amo.STATUS_PUBLIC, trusted=True)
    eq_(self.addon.status, amo.STATUS_PUBLIC)
    f = File.from_upload(upload, self.version, self.platform, is_beta=True,
                         parse_data=d)
    eq_(f.status, amo.STATUS_BETA)
def parse(self, addon=None, filename='webextension.xpi', **kwargs):
    path = 'src/olympia/files/fixtures/files/' + filename
    xpi = os.path.join(settings.ROOT, path)
    parse_addon_kwargs = {
        'user': self.user,
    }
    parse_addon_kwargs.update(**kwargs)

    with open(xpi, 'rb') as fobj:
        file_ = SimpleUploadedFile(filename, fobj.read())

    return parse_addon(file_, addon, **parse_addon_kwargs)
def test_trusted_litenominated_to_litenominated(self):
    upload = self.upload('extension')
    d = parse_addon(upload.path)
    with mock.patch('olympia.addons.models.Addon.update_status'):
        # mock update_status because it doesn't like Addons without files.
        self.addon.update(status=amo.STATUS_LITE_AND_NOMINATED,
                          trusted=True)
        assert self.addon.status == amo.STATUS_LITE_AND_NOMINATED
    f = File.from_upload(upload, self.version, self.platform, parse_data=d)
    assert f.status == amo.STATUS_LITE
def add_static_theme_from_lwt(lwt):
    # Try to handle LWT with no authors
    author = (lwt.listed_authors or [_get_lwt_default_author()])[0]
    # Wrap zip in FileUpload for Addon/Version from_upload to consume.
    upload = FileUpload.objects.create(user=author, valid=True)
    destination = os.path.join(user_media_path('addons'), 'temp',
                               uuid.uuid4().hex + '.xpi')
    build_static_theme_xpi_from_lwt(lwt, destination)
    upload.update(path=destination)

    # Create addon + version
    parsed_data = parse_addon(upload, user=author)
    addon = Addon.initialize_addon_from_upload(
        parsed_data, upload, amo.RELEASE_CHANNEL_LISTED, author)
    # Version.from_upload sorts out platforms for us.
    version = Version.from_upload(upload, addon, platforms=None,
                                  channel=amo.RELEASE_CHANNEL_LISTED,
                                  parsed_data=parsed_data)

    # Set category
    static_theme_categories = CATEGORIES.get(amo.FIREFOX.id, []).get(
        amo.ADDON_STATICTHEME, [])
    lwt_category = (lwt.categories.all() or [None])[0]  # lwt only have 1 cat.
    lwt_category_slug = lwt_category.slug if lwt_category else 'other'
    static_category = static_theme_categories.get(
        lwt_category_slug, static_theme_categories.get('other'))
    AddonCategory.objects.create(
        addon=addon,
        category=Category.from_static_category(static_category, True))

    # Set license
    lwt_license = PERSONA_LICENSES_IDS.get(
        lwt.persona.license, LICENSE_COPYRIGHT_AR)  # default to full copyright
    static_license = License.objects.get(builtin=lwt_license.builtin)
    version.update(license=static_license)

    # Set tags
    for addon_tag in AddonTag.objects.filter(addon=lwt):
        AddonTag.objects.create(addon=addon, tag=addon_tag.tag)

    # Logging
    activity.log_create(
        amo.LOG.CREATE_STATICTHEME_FROM_PERSONA, addon, user=author)
    log.debug('New static theme %r created from %r' % (addon, lwt))

    # And finally update the statuses
    version.all_files[0].update(status=amo.STATUS_PUBLIC)
    addon.update(status=amo.STATUS_PUBLIC)

    return addon
def get_addon_akismet_reports(user, user_agent, referrer, upload=None,
                              addon=None, data=None):
    if not waffle.switch_is_active('akismet-spam-check'):
        return []
    assert addon or upload
    properties = ('name', 'summary', 'description')

    if upload:
        addon = addon or upload.addon
        data = data or Addon.resolve_webext_translations(
            parse_addon(upload, addon, user), upload)
    if not data:
        return []  # bail early if no data to skip Translation lookups

    if addon and addon.has_listed_versions():
        translation_ids_gen = (
            getattr(addon, prop + '_id', None) for prop in properties)
        translation_ids = [id_ for id_ in translation_ids_gen if id_]
        # Just get all the values together to make it simplier
        existing_data = {
            text_type(value)
            for value in Translation.objects.filter(id__in=translation_ids)}
    else:
        existing_data = ()

    reports = []
    for prop in properties:
        locales = data.get(prop)
        if not locales:
            continue
        if isinstance(locales, dict):
            # Avoid spam checking the same value more than once by using a
            # set.
            locale_values = set(locales.values())
        else:
            # It's not a localized dict, it's a flat string; wrap it anyway.
            locale_values = {locales}
        for comment in locale_values:
            if not comment or comment in existing_data:
                # We don't want to submit empty or unchanged content
                continue
            reports.append(AkismetReport.create_for_addon(
                upload=upload, addon=addon, user=user, property_name=prop,
                property_value=comment, user_agent=user_agent,
                referrer=referrer))
    return reports
def clean(self):
    if not self.errors:
        self._clean_upload()
        xpi = parse_addon(self.cleaned_data['upload'], self.addon)
        # Make sure we don't already have the same non-rejected version.
        version_exists = Version.unfiltered.filter(
            addon=self.addon, version=xpi['version']).exists()
        if version_exists:
            msg = _(u'Version %s already exists, or was uploaded before.')
            raise forms.ValidationError(msg % xpi['version'])
    return self.cleaned_data
def test_rdf_parse_errors_are_ignored(self, run_validator, flag_is_active):
    run_validator.return_value = self.compatibility_result
    flag_is_active.return_value = True
    addon = Addon.objects.get(pk=3615)
    dupe_xpi = self.get_upload('extension.xpi')
    d = parse_addon(dupe_xpi)
    # Set up a duplicate upload:
    addon.update(guid=d['guid'])
    data = self.upload(filename=dupe_xpi.path)
    # Make sure we don't see a dupe UUID error:
    eq_(data['validation']['messages'], [])
def test_file_name(self):
    parsed_data = parse_addon(self.upload, self.addon, user=mock.Mock())
    version = Version.from_upload(self.upload, self.addon,
                                  [self.selected_app],
                                  amo.RELEASE_CHANNEL_LISTED,
                                  parsed_data=parsed_data)
    files = version.all_files
    # Since https://github.com/mozilla/addons-server/issues/8752 we are
    # selecting PLATFORM_ALL every time as a temporary measure until
    # platforms get removed.
    assert files[0].filename == u'delicious_bookmarks-0.1-fx.xpi'
def clean(self):
    if not self.version.is_allowed_upload():
        raise forms.ValidationError(
            _('You cannot upload any more files for this version.'))

    # Check for errors in the xpi.
    if not self.errors:
        xpi = parse_addon(self.cleaned_data['upload'], self.addon)
        if xpi['version'] != self.version.version:
            raise forms.ValidationError(_("Version doesn't match"))
    return self.cleaned_data
def test_android_with_mixed_desktop_creates_platform_files(self):
    parsed_data = parse_addon(self.upload, self.addon, user=mock.Mock())
    version = Version.from_upload(
        self.upload, self.addon,
        [amo.PLATFORM_LINUX.id, amo.PLATFORM_ANDROID.id],
        amo.RELEASE_CHANNEL_LISTED,
        parsed_data=parsed_data,
    )
    files = version.all_files
    assert sorted(amo.PLATFORMS[f.platform].shortname for f in files) == (
        ['android', 'linux'])
def test_new_version_with_additional_backgrounds(
        self, generate_static_theme_preview_mock):
    self.addon = addon_factory(type=amo.ADDON_STATICTHEME)
    path = 'src/olympia/devhub/tests/addons/static_theme_tiled.zip'
    self.upload = self.get_upload(
        abspath=os.path.join(settings.ROOT, path))
    parsed_data = parse_addon(self.upload, self.addon, user=mock.Mock())
    version = Version.from_upload(
        self.upload, self.addon, [], amo.RELEASE_CHANNEL_LISTED,
        parsed_data=parsed_data)
    assert len(version.all_files) == 1
    assert generate_static_theme_preview_mock.call_count == 1
def test_new_version_is_10s_compatible(self):
    AddonFeatureCompatibility.objects.create(addon=self.addon)
    assert self.addon.feature_compatibility.e10s == amo.E10S_UNKNOWN
    self.upload = self.get_upload('multiprocess_compatible_extension.xpi')
    parsed_data = parse_addon(self.upload, self.addon, user=mock.Mock())
    version = Version.from_upload(self.upload, self.addon, [self.platform],
                                  amo.RELEASE_CHANNEL_LISTED,
                                  parsed_data=parsed_data)
    assert version.pk
    assert self.addon.feature_compatibility.pk
    self.addon.feature_compatibility.reload()
    assert self.addon.feature_compatibility.e10s == amo.E10S_COMPATIBLE
def test_new_version_while_nominated(self,
                                     generate_static_theme_preview_mock):
    self.addon = addon_factory(
        type=amo.ADDON_STATICTHEME, status=amo.STATUS_NOMINATED,
        file_kw={'status': amo.STATUS_AWAITING_REVIEW})
    parsed_data = parse_addon(self.upload, self.addon, user=mock.Mock())
    version = Version.from_upload(self.upload, self.addon, [],
                                  amo.RELEASE_CHANNEL_LISTED,
                                  parsed_data=parsed_data)
    assert len(version.all_files) == 1
    assert generate_static_theme_preview_mock.call_count == 1
def test_new_version_while_public(
        self, generate_static_theme_preview_mock):
    self.addon = addon_factory(type=amo.ADDON_STATICTHEME)
    parsed_data = parse_addon(self.upload, self.addon, user=mock.Mock())
    version = Version.from_upload(
        self.upload, self.addon, [], amo.RELEASE_CHANNEL_LISTED,
        parsed_data=parsed_data)
    assert len(version.all_files) == 1
    assert generate_static_theme_preview_mock.call_count == 1

    assert version.get_background_image_urls() == [
        '%s/%s/%s/%s' % (user_media_url('addons'), str(self.addon.id),
                         unicode(version.id), 'weta.png')
    ]
def test_commits_to_git_waffle_enabled(self):
    addon = addon_factory()
    upload = self.get_upload('webextension_no_id.xpi')
    user = user_factory(username='******')
    parsed_data = parse_addon(upload, addon, user=user)

    version = Version.from_upload(
        upload, addon, [self.selected_app], amo.RELEASE_CHANNEL_LISTED,
        parsed_data=parsed_data)
    assert version.pk

    repo = AddonGitRepository(addon.pk)
    assert os.path.exists(repo.git_repository_path)
def test_new_version_is_webextension(self):
    self.addon.update(guid='@webextension-guid')
    AddonFeatureCompatibility.objects.create(addon=self.addon)
    assert self.addon.feature_compatibility.e10s == amo.E10S_UNKNOWN
    self.upload = self.get_upload('webextension.xpi')
    parsed_data = parse_addon(self.upload, self.addon, user=mock.Mock())
    version = Version.from_upload(self.upload, self.addon, [self.platform],
                                  amo.RELEASE_CHANNEL_LISTED,
                                  parsed_data=parsed_data)
    assert version.pk
    assert self.addon.feature_compatibility.pk
    self.addon.feature_compatibility.reload()
    assert self.addon.feature_compatibility.e10s == (
        amo.E10S_COMPATIBLE_WEBEXTENSION)
def validate_file_path(path, channel):
    """Run the validator against a file at the given path, and return the
    results, which should be a json string.

    Should only be called directly by `validate_upload` or `validate_file`
    tasks.

    Search plugins don't call the linter but get linted by
    `annotate_search_plugin_validation`.

    All legacy extensions (including dictionaries, themes etc) are disabled
    via `annotate_legacy_addon_restrictions` except if they're signed by
    Mozilla.
    """
    if path.endswith('.xml'):
        # search plugins are validated directly by addons-server
        # so that we don't have to call the linter or validator
        results = deepcopy(amo.VALIDATOR_SKELETON_RESULTS)
        annotations.annotate_search_plugin_restriction(
            results=results, file_path=path, channel=channel)
        return json.dumps(results)

    # Annotate results with potential legacy add-ons restrictions.
    try:
        data = parse_addon(path, minimal=True)
    except NoManifestFound:
        # If no manifest is found, return empty data; the check below
        # explicitly looks for is_webextension is False, so it will not be
        # considered a legacy extension, and the linter will pick it up and
        # will know what message to return to the developer.
        data = {}
    except InvalidManifest:
        # Similarly, if we can't parse the manifest, let the linter pick that
        # up.
        data = {}

    is_legacy_extension = data.get('is_webextension', None) is False
    is_mozilla_signed = data.get('is_mozilla_signed_extension', None) is True

    if is_legacy_extension:
        results = deepcopy(amo.VALIDATOR_SKELETON_RESULTS)
        annotations.annotate_legacy_addon_restrictions(
            path=path, results=results, parsed_data=data,
            error=not is_mozilla_signed)
        return json.dumps(results)
    log.info('Running linter on %s', path)
    return run_addons_linter(path, channel=channel)
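# --- Usage sketch (added for illustration, not part of the original module).
# validate_file_path() above returns the validation results as a JSON string,
# so callers (the `validate_upload` / `validate_file` tasks mentioned in the
# docstring) are expected to deserialize it before inspecting the messages.
# The file path below is a made-up example value.
results = json.loads(
    validate_file_path('/tmp/example-webextension.xpi',
                       channel=amo.RELEASE_CHANNEL_LISTED))
assert isinstance(results, dict)  # skeleton or linter output, decoded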
def test_commits_to_git_async(self, extract_mock):
    addon = addon_factory()
    upload = self.get_upload('webextension_no_id.xpi')
    upload.user = user_factory(username='******')
    parsed_data = parse_addon(upload, addon, user=upload.user)

    version = Version.from_upload(
        upload, addon, [self.selected_app], amo.RELEASE_CHANNEL_LISTED,
        parsed_data=parsed_data)
    assert version.pk

    # Only once instead of twice
    extract_mock.assert_called_once_with(
        version_id=version.pk, author_id=upload.user.pk)
def handle_upload(self, request, addon, version_string): if 'upload' in request.FILES: filedata = request.FILES['upload'] else: raise forms.ValidationError( _(u'Missing "upload" key in multipart file data.'), status.HTTP_400_BAD_REQUEST) # Parse the file to get and validate package data with the addon. pkg = parse_addon(filedata, addon) if not acl.submission_allowed(request.user, pkg): raise forms.ValidationError( _(u'You cannot submit this type of add-on'), status.HTTP_400_BAD_REQUEST) version_string = version_string or pkg['version'] if version_string and pkg['version'] != version_string: raise forms.ValidationError( _('Version does not match the manifest file.'), status.HTTP_400_BAD_REQUEST) if (addon is not None and addon.versions.filter(version=version_string).exists()): raise forms.ValidationError(_('Version already exists.'), status.HTTP_409_CONFLICT) dont_allow_no_guid = (not addon and not pkg.get('guid', None) and not pkg.get('is_webextension', False)) if dont_allow_no_guid: raise forms.ValidationError( _('Only WebExtensions are allowed to omit the GUID'), status.HTTP_400_BAD_REQUEST) if addon is None: addon = Addon.create_addon_from_upload_data(data=pkg, user=request.user, upload=filedata, is_listed=False) created = True else: created = False file_upload = handle_upload(filedata=filedata, user=request.user, addon=addon, submit=True) return file_upload, created
def test_parse_addon(search_mock, xpi_mock):
    parse_addon('file.xpi', None)
    xpi_mock.assert_called_with('file.xpi', None, True)

    parse_addon('file.xml', None)
    search_mock.assert_called_with('file.xml', None)

    parse_addon('file.jar', None)
    xpi_mock.assert_called_with('file.jar', None, True)
def test_dupe_uuid(self, flag_is_active):
    flag_is_active.return_value = True
    addon = Addon.objects.get(pk=3615)
    data = parse_addon(self.get_upload('extension.xpi'), user=self.user)
    addon.update(guid=data['guid'])

    dupe_xpi = self.get_upload('extension.xpi')
    res = self.client.get(reverse('devhub.upload_detail',
                                  args=[dupe_xpi.uuid, 'json']))
    assert res.status_code == 400, res.content
    data = json.loads(res.content)
    assert data['validation']['messages'] == (
        [{'tier': 1, 'message': 'Duplicate add-on ID found.',
          'type': 'error', 'fatal': True}])
    assert data['validation']['ending_tier'] == 1
def test_doesnt_commit_to_git_by_default(self):
    addon = addon_factory()
    upload = self.get_upload('webextension_no_id.xpi')
    user = user_factory(username='******')
    parsed_data = parse_addon(upload, addon, user=user)

    with transaction.atomic():
        version = Version.from_upload(upload, addon, [amo.FIREFOX.id],
                                      amo.RELEASE_CHANNEL_LISTED,
                                      parsed_data=parsed_data)
    assert version.pk

    repo = AddonGitRepository(addon.pk)
    assert not os.path.exists(repo.git_repository_path)
def test_force_extract(self):
    upload = self.get_upload('webextension_no_id.xpi')
    parsed_data = parse_addon(upload)
    # change the permissions so we can tell they've been re-parsed.
    parsed_data['permissions'].pop()
    file_ = File.from_upload(upload, self.version, self.platform,
                             parsed_data=parsed_data)
    assert WebextPermission.objects.count() == 1
    assert len(file_.webext_permissions) == 2

    call_command('extract_permissions', force=True)

    file_ = File.objects.no_cache().get(id=file_.id)
    assert WebextPermission.objects.get(file=file_)
    permissions = file_.webext_permissions
    assert len(permissions) == 3
def get_addon_akismet_reports(user, user_agent, referrer, upload=None,
                              addon=None, data=None, existing_data=()):
    if not waffle.switch_is_active('akismet-spam-check'):
        return []
    assert addon or upload
    properties = ('name', 'summary', 'description')

    if upload:
        addon = addon or upload.addon
        if not data:
            try:
                data = Addon.resolve_webext_translations(
                    parse_addon(upload, addon, user, minimal=True), upload)
            except ValidationError:
                # The xpi is broken - it'll be rejected by the linter so
                # abort.
                return []

    reports = []
    for prop in properties:
        locales = data.get(prop)
        if not locales:
            continue
        if isinstance(locales, dict):
            # Avoid spam checking the same value more than once by using a
            # set.
            locale_values = set(locales.values())
        else:
            # It's not a localized dict, it's a flat string; wrap it anyway.
            locale_values = {locales}
        for comment in locale_values:
            if not comment or comment in existing_data:
                # We don't want to submit empty or unchanged content
                continue
            report = AkismetReport.create_for_addon(
                upload=upload, addon=addon, user=user, property_name=prop,
                property_value=comment, user_agent=user_agent,
                referrer=referrer)
            reports.append((prop, report))
    return reports