def __init__(self, request, platform, version):
    self.request = request
    self.platform = platform
    self.version = version
    self.compat_mode = 'strict'
    if version_int(self.version) >= version_int('10.0'):
        self.compat_mode = 'ignore'
def extract_compatibility_info(cls, obj, version_obj):
    """Return compatibility info for the specified version_obj, as will
    be indexed in ES."""
    compatible_apps = {}
    for app, appver in version_obj.compatible_apps.items():
        if appver:
            min_, max_ = appver.min.version_int, appver.max.version_int
            min_human, max_human = appver.min.version, appver.max.version
            if not version_obj.files.filter(
                    strict_compatibility=True).exists():
                # The files attached to this version are not using strict
                # compatibility, so the max version essentially needs to be
                # ignored - let's fake a super high one. We leave max_human
                # alone to leave the API representation intact.
                max_ = version_int('9999')
        else:
            # Fake wide compatibility for add-ons with no info. We don't
            # want to reindex every time a new version of the app is
            # released, so we directly index a super high version as the
            # max.
            min_human, max_human = (
                amo.D2C_MIN_VERSIONS.get(app.id, '1.0'),
                amo.FAKE_MAX_VERSION)
            min_, max_ = version_int(min_human), version_int(max_human)
        compatible_apps[app.id] = {
            'min': min_, 'min_human': min_human,
            'max': max_, 'max_human': max_human,
        }
    return compatible_apps
def test_bump_version_in_model(self, mock_sign_file):
    # We want to make sure each file has been signed.
    self.file2 = amo.tests.file_factory(version=self.version)
    self.file2.update(filename='jetpack-b.xpi')
    backup_file2_path = u'{0}.backup_signature'.format(
        self.file2.file_path)
    try:
        fpath = 'src/olympia/files/fixtures/files/jetpack.xpi'
        with amo.tests.copy_file(fpath, self.file_.file_path):
            with amo.tests.copy_file(
                    'src/olympia/files/fixtures/files/jetpack.xpi',
                    self.file2.file_path):
                file_hash = self.file_.generate_hash()
                file2_hash = self.file2.generate_hash()
                assert self.version.version == '1.3'
                assert self.version.version_int == version_int('1.3')
                tasks.sign_addons([self.addon.pk])
                assert mock_sign_file.call_count == 2
                self.version.reload()
                assert self.version.version == '1.3.1-signed'
                assert self.version.version_int == version_int(
                    '1.3.1-signed')
                assert file_hash != self.file_.generate_hash()
                assert file2_hash != self.file2.generate_hash()
                self.assert_backup()
                assert os.path.exists(backup_file2_path)
    finally:
        if os.path.exists(backup_file2_path):
            os.unlink(backup_file2_path)
def test_dont_sign_dont_bump_version_bad_zipfile(self, mock_sign_file):
    with amo.tests.copy_file(__file__, self.file_.file_path):
        file_hash = self.file_.generate_hash()
        assert self.version.version == '1.3'
        assert self.version.version_int == version_int('1.3')
        tasks.sign_addons([self.addon.pk])
        assert not mock_sign_file.called
        self.version.reload()
        assert self.version.version == '1.3'
        assert self.version.version_int == version_int('1.3')
        assert file_hash == self.file_.generate_hash()
        self.assert_no_backup()
def get_values(self):
    appversion = self.request.GET.get(self.query_param)
    app = AddonAppFilterParam(self.request).get_value()
    if appversion and app:
        # Get a min version less than X.0, and a max greater than X.0a
        low = version_int(appversion)
        high = version_int(appversion + 'a')
        if low < version_int('10.0'):
            raise ValueError('appversion is invalid.')
        return app, low, high
    raise ValueError('Can not filter by appversion, a param is missing.')
def test_dont_sign_dont_bump_sign_error(self, mock_sign_file):
    mock_sign_file.side_effect = IOError()
    fpath = 'src/olympia/files/fixtures/files/jetpack.xpi'
    with amo.tests.copy_file(fpath, self.file_.file_path):
        file_hash = self.file_.generate_hash()
        assert self.version.version == '1.3'
        assert self.version.version_int == version_int('1.3')
        tasks.sign_addons([self.addon.pk])
        assert mock_sign_file.called
        self.version.reload()
        assert self.version.version == '1.3'
        assert self.version.version_int == version_int('1.3')
        assert file_hash == self.file_.generate_hash()
        self.assert_no_backup()
def test_dont_bump_not_signed(self, mock_sign_file):
    mock_sign_file.return_value = None  # Pretend we didn't sign.
    fpath = 'src/olympia/files/fixtures/files/jetpack.xpi'
    with amo.tests.copy_file(fpath, self.file_.file_path):
        file_hash = self.file_.generate_hash()
        assert self.version.version == '1.3'
        assert self.version.version_int == version_int('1.3')
        tasks.sign_addons([self.addon.pk])
        assert mock_sign_file.called
        self.version.reload()
        assert self.version.version == '1.3'
        assert self.version.version_int == version_int('1.3')
        assert file_hash == self.file_.generate_hash()
        self.assert_no_backup()
def _filter_search(request, qs, query, filters, sorting, sorting_default='-weekly_downloads', types=None): """Filter an ES queryset based on a list of filters.""" if types is None: types = [] APP = request.APP # Intersection of the form fields present and the filters we want to apply. show = [f for f in filters if query.get(f)] if query.get('q'): qs = qs.query(or_=name_query(query['q'])) if 'platform' in show and query['platform'] in amo.PLATFORM_DICT: ps = (amo.PLATFORM_DICT[query['platform']].id, amo.PLATFORM_ALL.id) # If we've selected "All Systems" don't filter by platform. if ps[0] != ps[1]: qs = qs.filter(platforms__in=ps) if 'appver' in show: # Get a min version less than X.0. low = version_int(query['appver']) # Get a max version greater than X.0a. high = version_int(query['appver'] + 'a') # If we're not using D2C then fall back to appversion checking. extensions_shown = (not query.get('atype') or query['atype'] == amo.ADDON_EXTENSION) if not extensions_shown or low < version_int('10.0'): qs = qs.filter(**{ 'current_version.compatible_apps.%s.max__gte' % APP.id: high, 'current_version.compatible_apps.%s.min__lte' % APP.id: low }) if 'atype' in show and query['atype'] in amo.ADDON_TYPES: qs = qs.filter(type=query['atype']) else: qs = qs.filter(type__in=types) if 'cat' in show: cat = (Category.objects.filter(id=query['cat']) .filter(Q(application=APP.id) | Q(type=amo.ADDON_SEARCH))) if not cat.exists(): show.remove('cat') if 'cat' in show: qs = qs.filter(category=query['cat']) if 'tag' in show: qs = qs.filter(tags=query['tag']) if 'sort' in show: qs = qs.order_by(sorting[query['sort']]) elif not query.get('q'): # Sort by a default if there was no query so results are predictable. qs = qs.order_by(sorting_default) return qs
def test_resign_bump_version_in_model_if_force(self, mock_sign_file):
    with amo.tests.copy_file(
            'src/olympia/files/fixtures/files/new-addon-signature.xpi',
            self.file_.file_path):
        self.file_.update(is_signed=True)
        file_hash = self.file_.generate_hash()
        assert self.version.version == '1.3'
        assert self.version.version_int == version_int('1.3')
        tasks.sign_addons([self.addon.pk], force=True)
        assert mock_sign_file.called
        self.version.reload()
        assert self.version.version == '1.3.1-signed'
        assert self.version.version_int == version_int('1.3.1-signed')
        assert file_hash != self.file_.generate_hash()
        self.assert_backup()
def get_values(self):
    appversion = self.request.GET.get(self.query_param)
    app = AddonAppQueryParam(self.request).get_value()
    if appversion and app:
        # Get a min version less than X.0, and a max greater than X.0a
        low = version_int(appversion)
        high = version_int(appversion + 'a')
        if low < version_int('10.0'):
            raise ValueError('Invalid "%s" parameter.' % self.query_param)
        return app, low, high
    raise ValueError(
        'Invalid combination of "%s" and "%s" parameters.' % (
            AddonAppQueryParam.query_param, self.query_param))
def make_langpack(version):
    versions = (version, '%s.*' % version)
    for version in versions:
        AppVersion.objects.get_or_create(application=amo.FIREFOX.id,
                                         version=version,
                                         version_int=version_int(version))
    return make_xpi({
        'install.rdf': """<?xml version="1.0"?>
            <RDF xmlns="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
                 xmlns:em="http://www.mozilla.org/2004/em-rdf#">
              <Description about="urn:mozilla:install-manifest"
                           em:id="*****@*****.**"
                           em:name="Foo Language Pack"
                           em:version="{0}"
                           em:type="8"
                           em:creator="mozilla.org">
                <em:targetApplication>
                  <Description>
                    <em:id>{{ec8030f7-c20a-464f-9b0e-13a3a9e97384}}</em:id>
                    <em:minVersion>{0}</em:minVersion>
                    <em:maxVersion>{1}</em:maxVersion>
                  </Description>
                </em:targetApplication>
              </Description>
            </RDF>
        """.format(*versions)
    }).read()
def test_no_bump_unreviewed(self, mock_sign_file):
    """Don't bump nor sign unreviewed files."""
    for status in (amo.UNREVIEWED_STATUSES + (amo.STATUS_BETA,)):
        self.file_.update(status=status)
        fpath = 'src/olympia/files/fixtures/files/jetpack.xpi'
        with amo.tests.copy_file(fpath, self.file_.file_path):
            file_hash = self.file_.generate_hash()
            assert self.version.version == '1.3'
            assert self.version.version_int == version_int('1.3')
            tasks.sign_addons([self.addon.pk])
            assert not mock_sign_file.called
            self.version.reload()
            assert self.version.version == '1.3'
            assert self.version.version_int == version_int('1.3')
            assert file_hash == self.file_.generate_hash()
            self.assert_no_backup()
def assert_not_signed(self, mock_sign_file, file_hash):
    assert not mock_sign_file.called
    self.version.reload()
    assert self.version.version == '1.3'
    assert self.version.version_int == version_int('1.3')
    assert file_hash == self.file_.generate_hash()
    self.assert_no_backup()
def find_previous_version(addon, file, version_string, channel):
    """
    Find the most recent previous version of this add-on, prior to
    `version`, that can be used to issue upgrade warnings.
    """
    if not addon or not version_string:
        return

    statuses = [amo.STATUS_APPROVED]

    # Find all previous files of this add-on with the correct status and in
    # the right channel.
    qs = File.objects.filter(
        version__addon=addon, version__channel=channel, status__in=statuses)

    if file:
        # Add some extra filters if we're validating a File instance,
        # to try to get the closest possible match.
        qs = (qs.exclude(pk=file.pk)
              # Files which are not for the same platform, but have
              # other files in the same version which are.
              .exclude(~Q(platform=file.platform) &
                       Q(version__files__platform=file.platform))
              # Files which are not for either the same platform or for
              # all platforms, but have other versions in the same
              # version which are.
              .exclude(~Q(platform__in=(file.platform,
                                        amo.PLATFORM_ALL.id)) &
                       Q(version__files__platform=amo.PLATFORM_ALL.id)))

    vint = version_int(version_string)
    for file_ in qs.order_by('-id'):
        # Only accept versions which come before the one we're validating.
        if (file_.version.version_int or 0) < vint:
            return file_
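# Hedged usage sketch for find_previous_version() above; `upload_version`
# and `warn_about_upgrade` are hypothetical names, and the channel constant
# mirrors the one used elsewhere in these snippets.
previous_file = find_previous_version(
    addon, file=None, version_string=upload_version,
    channel=amo.RELEASE_CHANNEL_LISTED)
if previous_file is not None:
    # Only files whose version_int sorts strictly below the upload are
    # returned, so the result is safe to use for upgrade warnings.
    warn_about_upgrade(previous_file)  # hypothetical helper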
def get_plugins(apiver=3, app=None, appver=None):
    # API versions < 3 ignore targetApplication entries for plugins so only
    # block the plugin if the appver is within the block range.
    if app:
        app_query = (Q(app__isnull=True) |
                     Q(app__guid=app) |
                     Q(app__guid__isnull=True))
    else:
        app_query = Q(app__isnull=True) | Q(app__isnull=False)

    plugins = (BlocklistPlugin.objects.no_cache().select_related('details')
               .filter(app_query)
               .extra(select={'app_guid': 'blapps.guid',
                              'app_min': 'blapps.min',
                              'app_max': 'blapps.max'}))

    if apiver < 3 and appver is not None:
        def between(ver, min, max):
            if not (min and max):
                return True
            return version_int(min) < ver < version_int(max)
        app_version = version_int(appver)
        plugins = [p for p in plugins
                   if between(app_version, p.app_min, p.app_max)]
    return list(plugins)
def test_sign_bump_old_versions_default_compat(self, mock_sign_file):
    """Sign files which are old, but default to compatible."""
    with amo.tests.copy_file(
            'src/olympia/files/fixtures/files/jetpack.xpi',
            self.file_.file_path):
        file_hash = self.file_.generate_hash()
        assert self.version.version == '1.3'
        assert self.version.version_int == version_int('1.3')
        self.set_max_appversion(settings.MIN_D2C_VERSION)
        tasks.sign_addons([self.addon.pk])
        assert mock_sign_file.called
        self.version.reload()
        assert self.version.version == '1.3.1-signed'
        assert self.version.version_int == version_int('1.3.1-signed')
        assert file_hash != self.file_.generate_hash()
        self.assert_backup()
def test_sign_bump_non_ascii_version(self, mock_sign_file):
    """Sign versions which have non-ascii version numbers."""
    self.version.update(version=u'é1.3')
    with amo.tests.copy_file(
            'src/olympia/files/fixtures/files/jetpack.xpi',
            self.file_.file_path):
        file_hash = self.file_.generate_hash()
        assert self.version.version == u'é1.3'
        assert self.version.version_int == version_int('1.3')
        tasks.sign_addons([self.addon.pk])
        assert mock_sign_file.called
        self.version.reload()
        assert self.version.version == u'é1.3.1-signed'
        assert self.version.version_int == version_int(u'é1.3.1-signed')
        assert file_hash != self.file_.generate_hash()
        self.assert_backup()
def annotate_new_legacy_addon_restrictions(results): """ Annotate validation results to restrict uploads of new legacy (non-webextension) add-ons if specific conditions are met. """ metadata = results.get('metadata', {}) target_apps = metadata.get('applications', {}) max_target_firefox_version = max( version_int(target_apps.get('firefox', {}).get('max', '')), version_int(target_apps.get('android', {}).get('max', '')) ) is_webextension = metadata.get('is_webextension') is True is_extension_type = metadata.get('is_extension') is True is_targeting_firefoxes_only = ( set(target_apps.keys()).intersection(('firefox', 'android')) == set(target_apps.keys()) ) is_targeting_firefox_lower_than_53_only = ( metadata.get('strict_compatibility') is True and # version_int('') is actually 200100. If strict compatibility is true, # the validator should have complained about the non-existant max # version, but it doesn't hurt to check that the value is sane anyway. max_target_firefox_version > 200100 and max_target_firefox_version < 53000000200100 ) if (is_extension_type and not is_webextension and is_targeting_firefoxes_only and not is_targeting_firefox_lower_than_53_only and waffle.switch_is_active('restrict-new-legacy-submissions')): msg = _(u'Starting with Firefox 53, new extensions on this site can ' u'only be WebExtensions.') messages = results['messages'] messages.insert(0, { 'tier': 1, 'type': 'error', 'id': ['validation', 'messages', 'legacy_extensions_restricted'], 'message': msg, 'description': [], 'compatibility_type': None }) results['errors'] += 1 return results
def test_sign_bump_new_versions_not_default_compat(self, mock_sign_file):
    """Sign files which are recent, even if not default to compatible."""
    with amo.tests.copy_file(
            'src/olympia/files/fixtures/files/jetpack.xpi',
            self.file_.file_path):
        file_hash = self.file_.generate_hash()
        assert self.version.version == '1.3'
        assert self.version.version_int == version_int('1.3')
        self.file_.update(binary_components=True, strict_compatibility=True)
        tasks.sign_addons([self.addon.pk])
        assert mock_sign_file.called
        self.version.reload()
        assert self.version.version == '1.3.1-signed'
        assert self.version.version_int == version_int('1.3.1-signed')
        assert file_hash != self.file_.generate_hash()
        self.assert_backup()
def filter_version(version, app_id):
    """
    Returns filters that can be sent to ES for app version ranges.

    If the version is an alpha, beta, or pre-release this does an exact
    match. Otherwise it will query where max >= M.Na and min <= M.N.
    """
    low = version_int(version)
    return {'appversion.%s.min__lte' % app_id: low}
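# Illustrative call for filter_version() above: with an example app id of 1,
# the returned dict is exactly the keyword filter a caller would pass to an
# ES queryset's filter().
assert filter_version('38.0', 1) == {
    'appversion.1.min__lte': version_int('38.0')}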
def test_supports_firefox_recent_default_to_compatible(self):
    max_appversion = self.version.apps.first().max

    # Recent, default to compatible.
    max_appversion.update(version='37', version_int=version_int('37'))
    self.file_.update(binary_components=False, strict_compatibility=False)
    self.assert_not_signed()
    packaged.sign_file(self.file_)
    self.assert_signed()
def supports_firefox(file_obj):
    """Return True if the file supports a high enough version of Firefox.

    We only sign files that are at least compatible with Firefox
    MIN_D2C_VERSION, or Firefox MIN_NOT_D2C_VERSION if they are not default
    to compatible.
    """
    apps = file_obj.version.apps.all()
    if not file_obj.binary_components and not file_obj.strict_compatibility:
        # Version is "default to compatible".
        return apps.filter(
            max__application__in=SIGN_FOR_APPS,
            max__version_int__gte=version_int(settings.MIN_D2C_VERSION))
    else:
        # Version isn't "default to compatible".
        return apps.filter(
            max__application__in=[amo.FIREFOX.id, amo.ANDROID.id],
            max__version_int__gte=version_int(settings.MIN_NOT_D2C_VERSION))
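# Hedged sketch (not taken from the codebase) of how the queryset returned
# by supports_firefox() could gate signing; packaged.sign_file() is invoked
# the same way in the tests below.
if supports_firefox(file_obj):
    packaged.sign_file(file_obj)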
def test_supports_firefox_old_not_default_to_compatible(self):
    max_appversion = self.version.apps.first().max

    # Old, and not default to compatible.
    max_appversion.update(version='4', version_int=version_int('4'))
    self.file_.update(binary_components=True, strict_compatibility=True)
    self.assert_not_signed()
    packaged.sign_file(self.file_)
    self.assert_signed()
def test_supports_firefox_android_recent_not_default_to_compatible(self):
    max_appversion = self.version.apps.first().max

    # Recent, not default to compatible.
    max_appversion.update(application=amo.ANDROID.id,
                          version='37', version_int=version_int('37'))
    self.file_.update(binary_components=True, strict_compatibility=True)
    self.assert_not_signed()
    packaged.sign_file(self.file_)
    self.assert_signed()
def test_supports_firefox_android_old_default_to_compatible(self):
    max_appversion = self.version.apps.first().max

    # Old, and default to compatible.
    max_appversion.update(application=amo.ANDROID.id,
                          version='4', version_int=version_int('4'))
    self.file_.update(binary_components=False, strict_compatibility=False)
    self.assert_not_signed()
    packaged.sign_file(self.file_)
    self.assert_signed()
def test_version_int():
    """Test that version_int correctly converts our versions."""
    assert version_int('3.5.0a1pre2') == 3050000001002
    assert version_int('') == 200100
    assert version_int('0') == 200100
    assert version_int('*') == 99000000200100
    assert version_int(MAXVERSION) == MAXVERSION
    assert version_int(MAXVERSION + 1) == MAXVERSION
    assert version_int('9999999') == MAXVERSION
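# Minimal sketch, not from the codebase, of the comparison pattern the rest
# of these snippets rely on: convert both sides with version_int() and
# compare the resulting integers. The expected outcomes follow from the
# asserts above and from test_version_int_compare() below.
def is_at_least(version, minimum):
    """Hypothetical helper: True if `version` is at least `minimum`."""
    return version_int(version) >= version_int(minimum)

assert is_at_least('10.0', '9.0')
assert not is_at_least('3.6.8', '3.6.*')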
def test_supports_firefox_old_not_default_to_compatible(self):
    max_appversion = self.version.apps.first().max

    # Old, and not default to compatible.
    max_appversion.update(version=settings.MIN_D2C_VERSION,
                          version_int=version_int(
                              settings.MIN_D2C_VERSION))
    self.file_.update(binary_components=True, strict_compatibility=True)
    self.assert_not_signed()
    packaged.sign_file(self.file_, settings.SIGNING_SERVER)
    self.assert_not_signed()
def test_supports_firefox_android_recent_not_default_to_compatible(self):
    max_appversion = self.version.apps.first().max

    # Recent, not default to compatible.
    max_appversion.update(application=amo.ANDROID.id,
                          version=settings.MIN_NOT_D2C_VERSION,
                          version_int=version_int(
                              settings.MIN_NOT_D2C_VERSION))
    self.file_.update(binary_components=True, strict_compatibility=True)
    self.assert_not_signed()
    packaged.sign_file(self.file_, settings.SIGNING_SERVER)
    self.assert_signed()
def test_supports_firefox_android_old_default_to_compatible(self):
    max_appversion = self.version.apps.first().max

    # Old, and default to compatible.
    max_appversion.update(application=amo.ANDROID.id,
                          version=settings.MIN_D2C_VERSION,
                          version_int=version_int(
                              settings.MIN_D2C_VERSION))
    self.file_.update(binary_components=False, strict_compatibility=False)
    self.assert_not_signed()
    packaged.sign_file(self.file_, settings.SIGNING_SERVER)
    self.assert_signed()
def extract_compatibility_info(cls, version_obj):
    compatible_apps = {}
    for app, appver in version_obj.compatible_apps.items():
        if appver:
            min_, max_ = appver.min.version_int, appver.max.version_int
            min_human, max_human = appver.min.version, appver.max.version
            if not version_obj.files.filter(
                    strict_compatibility=True).exists():
                # The files attached to this version are not using strict
                # compatibility, so the max version essentially needs to be
                # ignored - let's fake a super high one. We leave max_human
                # alone to leave the API representation intact.
                max_ = version_int('9999')
        else:
            # Fake wide compatibility for search tools and personas.
            min_, max_ = 0, version_int('9999')
            min_human, max_human = None, None
        compatible_apps[app.id] = {
            'min': min_, 'min_human': min_human,
            'max': max_, 'max_human': max_human,
        }
    return compatible_apps
def test_version_int():
    """Test that version_int correctly converts our versions."""
    assert version_int('3.5.0a1pre2') == 3050000001002
    assert version_int('') == 200100
    assert version_int('0') == 200100
    assert version_int('*') == 65535000000200100
    assert version_int(MAX_VERSION_PART) == 65535000000200100
    assert version_int(MAX_VERSION_PART + 1) == 65535000000200100
def extract_compatibility_info(cls, obj, version_obj): """Return compatibility info for the specified version_obj, as will be indexed in ES.""" compatible_apps = {} # <Version>.compatible_apps and <Addon>.compatible_apps have a subtle # difference: the latter handles addons with no compatibility info, # something the former can not do in a performant way easily (it # computes compatibility info in a transformer where it does not have # access to the parent addon without making additional queries). # Here, in the indexer, we have access to both already, so if we detect # that the add-on is not supposed to have compatibility information, we # use the implementation from Addon. if obj.type in amo.NO_COMPAT: source = obj else: source = version_obj for app, appver in source.compatible_apps.items(): if appver: min_, max_ = appver.min.version_int, appver.max.version_int min_human, max_human = appver.min.version, appver.max.version if not version_obj.files.filter( strict_compatibility=True).exists(): # The files attached to this version are not using strict # compatibility, so the max version essentially needs to be # ignored - let's fake a super high one. We leave max_human # alone to leave the API representation intact. max_ = version_int('9999') else: # Fake wide compatibility for search tools and personas. min_, max_ = 0, version_int('9999') min_human, max_human = None, None compatible_apps[app.id] = { 'min': min_, 'min_human': min_human, 'max': max_, 'max_human': max_human, } return compatible_apps
def extract_compatibility_info(cls, version_obj):
    compatible_apps = {}
    for app, appver in version_obj.compatible_apps.items():
        if appver:
            min_, max_ = appver.min.version_int, appver.max.version_int
            min_human, max_human = appver.min.version, appver.max.version
        else:
            # Fake wide compatibility for search tools and personas.
            min_, max_ = 0, version_int('9999')
            min_human, max_human = None, None
        compatible_apps[app.id] = {
            'min': min_, 'min_human': min_human,
            'max': max_, 'max_human': max_human,
        }
    return compatible_apps
def test_dont_sign_dont_bump_other_applications(self, mock_sign_file):
    """Don't sign files which are for applications we don't sign for."""
    path = 'src/olympia/files/fixtures/files/jetpack.xpi'
    with amo.tests.copy_file(path, self.file_.file_path):
        file_hash = self.file_.generate_hash()
        assert self.version.version == '1.3'
        assert self.version.version_int == version_int('1.3')
        apps_without_signing = [app for app in amo.APPS_ALL.keys()
                                if app not in packaged.SIGN_FOR_APPS]
        for app in apps_without_signing:
            self.max_appversion.update(application=app)
            tasks.sign_addons([self.addon.pk])
            self.assert_not_signed(mock_sign_file, file_hash)
def find_previous_version(addon, file, version_string, channel): """ Find the most recent previous version of this add-on, prior to `version`, that can be used to issue upgrade warnings. """ if not addon or not version_string: return is_version_beta = is_beta(version_string) statuses = [amo.STATUS_PUBLIC] if is_version_beta: # Only include beta versions if the version string passed corresponds # to a beta version. This is not perfect because even if the version # string *looks* like a beta, the developer might want to use it as a # regular listed upload, and in that case including previous betas in # the list is wrong, but it's the best we can do when we're dealing # with a FileUpload, since it's too early to know what the developer # intends to do. statuses.append(amo.STATUS_BETA) # Find all previous files of this add-on with the correct status and in # the right channel. qs = File.objects.filter(version__addon=addon, version__channel=channel, status__in=statuses) if file: # Add some extra filters if we're validating a File instance, # to try to get the closest possible match. qs = ( qs.exclude(pk=file.pk) # Files which are not for the same platform, but have # other files in the same version which are. .exclude(~Q(platform=file.platform) & Q(version__files__platform=file.platform)) # Files which are not for either the same platform or for # all platforms, but have other versions in the same # version which are. .exclude(~Q(platform__in=(file.platform, amo.PLATFORM_ALL.id)) & Q(version__files__platform=amo.PLATFORM_ALL.id))) vint = version_int(version_string) for file_ in qs.order_by('-id'): # Only accept versions which come before the one we're validating. if file_.version.version_int < vint: return file_
def get_plugins(apiver, app, appver=None):
    # API versions < 3 ignore targetApplication entries for plugins so only
    # block the plugin if the appver is within the block range.
    plugins = (BlocklistPlugin.objects.no_cache().select_related('details')
               .filter(Q(app__isnull=True) | Q(app__guid=app) |
                       Q(app__guid__isnull=True))
               .extra(select={'app_guid': 'blapps.guid',
                              'app_min': 'blapps.min',
                              'app_max': 'blapps.max'}))
    if apiver < 3 and appver is not None:
        def between(ver, min, max):
            if not (min and max):
                return True
            return version_int(min) < ver < version_int(max)
        app_version = version_int(appver)
        plugins = [p for p in plugins
                   if between(app_version, p.app_min, p.app_max)]
    return list(plugins)
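# Hedged usage sketch: the guid is Firefox's application id as it appears in
# the langpack fixture above, and the apiver/appver values are illustrative.
# Consumers older than API v3 get the plugin list pre-filtered by the
# requesting app's version; newer consumers receive every matching plugin.
blocked = get_plugins(apiver=2,
                      app='{ec8030f7-c20a-464f-9b0e-13a3a9e97384}',
                      appver='10.0')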
def test_dont_sign_dont_bump_old_versions(self, mock_sign_file):
    """Don't sign files which are too old, or not default to compatible."""
    fpath = 'src/olympia/files/fixtures/files/jetpack.xpi'
    with amo.tests.copy_file(fpath, self.file_.file_path):
        file_hash = self.file_.generate_hash()
        assert self.version.version == '1.3'
        assert self.version.version_int == version_int('1.3')

        # Too old, don't sign.
        self.set_max_appversion('1')  # Very very old.
        tasks.sign_addons([self.addon.pk])
        self.assert_not_signed(mock_sign_file, file_hash)

        # MIN_D2C_VERSION, but strict compat: don't sign.
        self.set_max_appversion(settings.MIN_D2C_VERSION)
        self.file_.update(strict_compatibility=True)
        tasks.sign_addons([self.addon.pk])
        self.assert_not_signed(mock_sign_file, file_hash)

        # MIN_D2C_VERSION, but binary component: don't sign.
        self.file_.update(strict_compatibility=False, binary_components=True)
        tasks.sign_addons([self.addon.pk])
        self.assert_not_signed(mock_sign_file, file_hash)
def addon_filter(addons, addon_type, limit, app, platform, version, compat_mode='strict'): """ Filter addons by type, application, app version, and platform. Add-ons that support the current locale will be sorted to front of list. Shuffling will be applied to the add-ons supporting the locale and the others separately. Doing this in the database takes too long, so we do it in code and wrap it in generous caching. """ APP = app def partition(seq, key): """Group a sequence based into buckets by key(x).""" groups = itertools.groupby(sorted(seq, key=key), key=key) return ((k, list(v)) for k, v in groups) # Take out personas since they don't have versions. groups = dict(partition(addons, lambda x: x.type == amo.ADDON_PERSONA)) personas, addons = groups.get(True, []), groups.get(False, []) platform = platform.lower() if platform != 'all' and platform in amo.PLATFORM_DICT: def f(ps): return pid in ps or amo.PLATFORM_ALL in ps pid = amo.PLATFORM_DICT[platform] addons = [ a for a in addons if f(a.current_version.supported_platforms) ] if version is not None: vint = version_int(version) def f_strict(app): return app.min.version_int <= vint <= app.max.version_int def f_ignore(app): return app.min.version_int <= vint xs = [(a, a.compatible_apps) for a in addons] # Iterate over addons, checking compatibility depending on compat_mode. addons = [] for addon, apps in xs: app = apps.get(APP) if compat_mode == 'ignore': if app and f_ignore(app): addons.append(addon) # Put personas back in. addons.extend(personas) # We prefer add-ons that support the current locale. lang = get_language() def partitioner(x): return x.description is not None and (x.description.locale == lang) groups = dict(partition(addons, partitioner)) good, others = groups.get(True, []), groups.get(False, []) random.shuffle(good) random.shuffle(others) # If limit=0, we return all addons with `good` coming before `others`. # Otherwise pad `good` if less than the limit and return the limit. if limit > 0: if len(good) < limit: good.extend(others[:limit - len(good)]) return good[:limit] else: good.extend(others) return good
def fix_let_scope_bustage_in_addons(addon_ids): """Used to fix the "let scope bustage" (bug 1224686) in the last version of the provided add-ons. This is used in the 'fix_let_scope_bustage' management commands. It also bumps the version number of the file and the Version, so the Firefox extension update mechanism picks this new fixed version and installs it. """ log.info(u'[{0}] Fixing addons.'.format(len(addon_ids))) addons_emailed = [] for addon in Addon.objects.filter(id__in=addon_ids): # We only care about the latest added version for each add-on. version = addon.versions.first() log.info(u'Fixing addon {0}, version {1}'.format(addon, version)) bumped_version_number = u'{0}.1-let-fixed'.format(version.version) for file_obj in version.files.all(): if not os.path.isfile(file_obj.file_path): log.info(u'File {0} does not exist, skip'.format(file_obj.pk)) continue # Save the original file, before bumping the version. backup_path = u'{0}.backup_let_fix'.format(file_obj.file_path) shutil.copy(file_obj.file_path, backup_path) try: # Apply the fix itself. fix_let_scope_bustage_in_xpi(file_obj.file_path) except: log.error(u'Failed fixing file {0}'.format(file_obj.pk), exc_info=True) # Revert the fix by restoring the backup. shutil.move(backup_path, file_obj.file_path) continue # We move to the next file. # Need to bump the version (modify the manifest file) # before the file is signed. update_version_number(file_obj, bumped_version_number) if file_obj.is_signed: # Only sign if it was already signed. if file_obj.status == amo.STATUS_PUBLIC: server = settings.SIGNING_SERVER else: server = settings.PRELIMINARY_SIGNING_SERVER sign_file(file_obj, server) # Now update the Version model. version.update(version=bumped_version_number, version_int=version_int(bumped_version_number)) addon = version.addon if addon.pk not in addons_emailed: # Send a mail to the owners/devs warning them we've # automatically fixed their addon. qs = (AddonUser.objects .filter(role=amo.AUTHOR_ROLE_OWNER, addon=addon) .exclude(user__email__isnull=True)) emails = qs.values_list('user__email', flat=True) subject = MAIL_SUBJECT.format(addon=addon.name) message = MAIL_MESSAGE.format( addon=addon.name, addon_url=amo.helpers.absolutify( addon.get_dev_url(action='versions'))) amo.utils.send_mail( subject, message, recipient_list=emails, fail_silently=True, headers={'Reply-To': '*****@*****.**'}) addons_emailed.append(addon.pk)
def test_version_int_compare():
    assert version_int('3.6.0.*') == version_int('3.6.0.99')
    assert version_int('3.6.*.0') == version_int('3.6.99')
    assert version_int('3.6.*') > version_int('3.6.8')
    assert version_int('3.6.*') > version_int('3.6.99.98')
    assert version_int('*') == version_int('65535.99.99.99')
    assert version_int('*.0') == version_int('65535')
    assert version_int('98.*') < version_int('*')
    assert version_int('5.*.0') == version_int('5.99')
    assert version_int('5.*') > version_int('5.0.*')
def extract_document(cls, obj): """Extract indexable attributes from an add-on.""" attrs = ('id', 'average_daily_users', 'bayesian_rating', 'created', 'default_locale', 'guid', 'hotness', 'is_disabled', 'is_listed', 'last_updated', 'public_stats', 'slug', 'status', 'type', 'weekly_downloads') data = {attr: getattr(obj, attr) for attr in attrs} if obj.type == amo.ADDON_PERSONA: try: # Boost on popularity. data['boost'] = obj.persona.popularity**.2 data['has_theme_rereview'] = ( obj.persona.rereviewqueuetheme_set.exists()) # 'weekly_downloads' field is used globally to sort, but # for themes weekly_downloads don't make much sense, use # popularity instead (FIXME: should be the other way around). data['weekly_downloads'] = obj.persona.popularity except ObjectDoesNotExist: # The instance won't have a persona while it's being created. pass else: # Boost by the number of users on a logarithmic scale. The maximum # boost (11,000,000 users for adblock) is about 5x. data['boost'] = obj.average_daily_users**.2 data['has_theme_rereview'] = None data['app'] = [app.id for app in obj.compatible_apps.keys()] data['appversion'] = {} for app, appver in obj.compatible_apps.items(): if appver: min_, max_ = appver.min.version_int, appver.max.version_int min_human, max_human = appver.min.version, appver.max.version else: # Fake wide compatibility for search tools and personas. min_, max_ = 0, version_int('9999') min_human, max_human = None, None data['appversion'][app.id] = { 'min': min_, 'min_human': min_human, 'max': max_, 'max_human': max_human, } data['authors'] = [a.name for a in obj.listed_authors] # Quadruple the boost if the add-on is public. if obj.status == amo.STATUS_PUBLIC and 'boost' in data: data['boost'] = max(data['boost'], 1) * 4 # We go through attach_categories and attach_tags transformer before # calling this function, it sets category_ids and tag_list. data['category'] = getattr(obj, 'category_ids', []) if obj.current_version: data['current_version'] = { 'id': obj.current_version.pk, 'files': [{ 'id': file_.id, 'created': file_.created, 'filename': file_.filename, 'hash': file_.hash, 'platform': file_.platform, 'size': file_.size, 'status': file_.status, } for file_ in obj.current_version.all_files], 'reviewed': obj.current_version.reviewed, 'version': obj.current_version.version, } data['has_version'] = True data['platforms'] = [ p.id for p in obj.current_version.supported_platforms ] else: data['has_version'] = None data['tags'] = getattr(obj, 'tag_list', []) # Handle localized fields. # First, deal with the 3 fields that need everything: for field in ('description', 'name', 'summary'): data.update(cls.extract_field_raw_translations(obj, field)) data.update(cls.extract_field_search_translations(obj, field)) data.update(cls.extract_field_analyzed_translations(obj, field)) # Then add fields that only need to be returned to the API without # contributing to search relevancy. for field in ('homepage', 'support_email', 'support_url'): data.update(cls.extract_field_raw_translations(obj, field)) # Finally, add the special sort field, coercing the current translation # into an unicode object first. data['name_sort'] = unicode(obj.name).lower() return data
def get_compat_mode(version):
    # Returns appropriate compat mode based on app version.
    # Replace when we are ready to deal with bug 711698.
    vint = version_int(version)
    return 'ignore' if vint >= version_int('10.0') else 'strict'
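# Expected behaviour given the threshold above: anything below 10.0 keeps
# strict compatibility checking, while 10.0 and later default to 'ignore'.
assert get_compat_mode('9.0') == 'strict'
assert get_compat_mode('10.0') == 'ignore'
assert get_compat_mode('38.0') == 'ignore'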
def find_compatible_version(addon, app_id, app_version=None, platform=None, compat_mode='strict'): """Returns the newest compatible version (ordered by version id desc) for the given addon.""" if not app_id: return None if platform: # We include platform_id=1 always in the SQL so we skip it here. platform = platform.lower() if platform != 'all' and platform in amo.PLATFORM_DICT: platform = amo.PLATFORM_DICT[platform].id else: platform = None log.debug(u'Checking compatibility for add-on ID:%s, APP:%s, V:%s, ' u'OS:%s, Mode:%s' % (addon.id, app_id, app_version, platform, compat_mode)) valid_file_statuses = ','.join(map(str, addon.valid_file_statuses)) data = { 'id': addon.id, 'app_id': app_id, 'platform': platform, 'valid_file_statuses': valid_file_statuses, 'channel': amo.RELEASE_CHANNEL_LISTED, } if app_version: data.update(version_int=version_int(app_version)) else: # We can't perform the search queries for strict or normal without # an app version. compat_mode = 'ignore' ns_key = cache_ns_key('d2c-versions:%s' % addon.id) cache_key = '%s:%s:%s:%s:%s' % (ns_key, app_id, app_version, platform, compat_mode) version_id = cache.get(cache_key) if version_id is not None: log.debug(u'Found compatible version in cache: %s => %s' % (cache_key, version_id)) if version_id == 0: return None else: try: return Version.objects.get(pk=version_id) except Version.DoesNotExist: pass raw_sql = [ """ SELECT versions.* FROM versions INNER JOIN addons ON addons.id = versions.addon_id AND addons.id = %(id)s INNER JOIN applications_versions ON applications_versions.version_id = versions.id INNER JOIN appversions appmin ON appmin.id = applications_versions.min AND appmin.application_id = %(app_id)s INNER JOIN appversions appmax ON appmax.id = applications_versions.max AND appmax.application_id = %(app_id)s INNER JOIN files ON files.version_id = versions.id AND (files.platform_id = 1""" ] if platform: raw_sql.append(' OR files.platform_id = %(platform)s') raw_sql.append(') WHERE files.status IN (%(valid_file_statuses)s) ') raw_sql.append(' AND versions.channel = %(channel)s ') if app_version: raw_sql.append('AND appmin.version_int <= %(version_int)s ') if compat_mode == 'ignore': pass # No further SQL modification required. elif compat_mode == 'normal': raw_sql.append("""AND CASE WHEN files.strict_compatibility = 1 OR files.binary_components = 1 THEN appmax.version_int >= %(version_int)s ELSE 1 END """) # Filter out versions that don't have the minimum maxVersion # requirement to qualify for default-to-compatible. d2c_max = amo.D2C_MAX_VERSIONS.get(app_id) if d2c_max: data['d2c_max_version'] = version_int(d2c_max) raw_sql.append("AND appmax.version_int >= %(d2c_max_version)s ") # Filter out versions found in compat overrides raw_sql.append("""AND NOT versions.id IN ( SELECT version_id FROM incompatible_versions WHERE app_id=%(app_id)s AND (min_app_version='0' AND max_app_version_int >= %(version_int)s) OR (min_app_version_int <= %(version_int)s AND max_app_version='*') OR (min_app_version_int <= %(version_int)s AND max_app_version_int >= %(version_int)s)) """) else: # Not defined or 'strict'. raw_sql.append('AND appmax.version_int >= %(version_int)s ') raw_sql.append('ORDER BY versions.id DESC LIMIT 1;') version = Version.objects.raw(''.join(raw_sql) % data) if version: version = version[0] version_id = version.id else: version = None version_id = 0 log.debug(u'Caching compat version %s => %s' % (cache_key, version_id)) cache.set(cache_key, version_id, None) return version
def test_extract_version_and_files(self): permissions = ['bookmarks', 'random permission'] optional_permissions = ['cookies', 'optional permission'] version = self.addon.current_version # Make the version a webextension and add a bunch of things to it to # test different scenarios. version.file.update(is_webextension=True) version.license = License.objects.create(name='My licensé', url='http://example.com/', builtin=0) [ WebextPermission.objects.create( file=version.file, permissions=permissions, optional_permissions=optional_permissions, ) ] version.save() # Now we can run the extraction and start testing. extracted = self._extract() assert extracted['current_version'] assert extracted['current_version']['id'] == version.pk # Because strict_compatibility is False, the max version we record in # the index is an arbitrary super high version. assert extracted['current_version']['compatible_apps'] == { FIREFOX.id: { 'min': 2000000200100, 'max': version_int('*'), 'max_human': '4.0', 'min_human': '2.0', } } assert extracted['current_version']['license'] == { 'builtin': 0, 'id': version.license.pk, 'name_translations': [{ 'lang': 'en-US', 'string': 'My licensé' }], 'url': 'http://example.com/', } assert extracted['current_version']['release_notes_translations'] == [ { 'lang': 'en-US', 'string': 'Fix for an important bug' }, { 'lang': 'fr', 'string': "Quelque chose en fran\xe7ais.\n\nQuelque chose d'autre.", }, ] assert extracted['current_version']['reviewed'] == version.reviewed assert extracted['current_version']['version'] == version.version extracted_file = extracted['current_version']['files'][0] assert extracted_file['id'] == version.file.pk assert extracted_file['created'] == version.file.created assert extracted_file['filename'] == version.file.filename assert extracted_file['hash'] == version.file.hash assert extracted_file['is_mozilla_signed_extension'] == ( version.file.is_mozilla_signed_extension) assert extracted_file['size'] == version.file.size assert extracted_file['status'] == version.file.status assert extracted_file['permissions'] == permissions assert extracted_file['optional_permissions'] == optional_permissions
def between(ver, min, max):
    if not (min and max):
        return True
    return version_int(min) < ver < version_int(max)
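# Illustrative checks for between() above, assuming version_int from
# olympia.versions.compare: the block range is exclusive at both ends, and
# a missing bound means "no restriction".
assert between(version_int('3.6'), '3.5', '3.7')
assert not between(version_int('3.7'), '3.5', '3.7')
assert between(version_int('3.6'), None, '3.7')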
def addon_filter(addons, addon_type, limit, app, platform, version, compat_mode='strict', shuffle=True): """ Filter addons by type, application, app version, and platform. Add-ons that support the current locale will be sorted to front of list. Shuffling will be applied to the add-ons supporting the locale and the others separately. Doing this in the database takes too long, so we in code and wrap it in generous caching. """ APP = app if addon_type.upper() != 'ALL': try: addon_type = int(addon_type) if addon_type: addons = [a for a in addons if a.type == addon_type] except ValueError: # `addon_type` is ALL or a type id. Otherwise we ignore it. pass # Take out personas since they don't have versions. groups = dict(partition(addons, lambda x: x.type == amo.ADDON_PERSONA)) personas, addons = groups.get(True, []), groups.get(False, []) platform = platform.lower() if platform != 'all' and platform in amo.PLATFORM_DICT: def f(ps): return pid in ps or amo.PLATFORM_ALL in ps pid = amo.PLATFORM_DICT[platform] addons = [ a for a in addons if f(a.current_version.supported_platforms) ] if version is not None: vint = version_int(version) def f_strict(app): return app.min.version_int <= vint <= app.max.version_int def f_ignore(app): return app.min.version_int <= vint xs = [(a, a.compatible_apps) for a in addons] # Iterate over addons, checking compatibility depending on compat_mode. addons = [] for addon, apps in xs: app = apps.get(APP) if compat_mode == 'strict': if app and f_strict(app): addons.append(addon) elif compat_mode == 'ignore': if app and f_ignore(app): addons.append(addon) elif compat_mode == 'normal': # This does a db hit but it's cached. This handles the cases # for strict opt-in, binary components, and compat overrides. v = addon.compatible_version(APP.id, version, platform, compat_mode) if v: # There's a compatible version. addons.append(addon) # Put personas back in. addons.extend(personas) # We prefer add-ons that support the current locale. lang = get_language() def partitioner(x): return x.description is not None and (x.description.locale == lang) groups = dict(partition(addons, partitioner)) good, others = groups.get(True, []), groups.get(False, []) if shuffle: random.shuffle(good) random.shuffle(others) # If limit=0, we return all addons with `good` coming before `others`. # Otherwise pad `good` if less than the limit and return the limit. if limit > 0: if len(good) < limit: good.extend(others[:limit - len(good)]) return good[:limit] else: good.extend(others) return good
def sign_addons(addon_ids, force=False, **kw): """Used to sign all the versions of an addon. This is used in the 'process_addons --task resign_addons_for_cose' management command. It also bumps the version number of the file and the Version, so the Firefox extension update mechanism picks this new signed version and installs it. """ log.info(u'[{0}] Signing addons.'.format(len(addon_ids))) mail_subject, mail_message = MAIL_COSE_SUBJECT, MAIL_COSE_MESSAGE # query everything except for search-plugins as they're generally # not signed current_versions = (Addon.objects.filter(id__in=addon_ids).values_list( '_current_version', flat=True)) qset = Version.objects.filter(id__in=current_versions) addons_emailed = set() for version in qset: # We only sign files that have been reviewed to_sign = version.files.filter(status__in=amo.REVIEWED_STATUSES) to_sign = to_sign.all() if not to_sign: log.info(u'Not signing addon {0}, version {1} (no files)'.format( version.addon, version)) log.info(u'Signing addon {0}, version {1}'.format( version.addon, version)) bumped_version_number = get_new_version_number(version.version) signed_at_least_a_file = False # Did we sign at least one file? # We haven't cleared the database yet to ensure that there's only # one file per WebExtension, so we're going through all files just # to be sure. for file_obj in to_sign: if not os.path.isfile(file_obj.file_path): log.info(u'File {0} does not exist, skip'.format(file_obj.pk)) continue # Save the original file, before bumping the version. backup_path = u'{0}.backup_signature'.format(file_obj.file_path) shutil.copy(file_obj.file_path, backup_path) try: # Need to bump the version (modify manifest file) # before the file is signed. update_version_number(file_obj, bumped_version_number) signed = bool(sign_file(file_obj)) if signed: # Bump the version number if at least one signed. signed_at_least_a_file = True else: # We didn't sign, so revert the version bump. shutil.move(backup_path, file_obj.file_path) except Exception: log.error(u'Failed signing file {0}'.format(file_obj.pk), exc_info=True) # Revert the version bump, restore the backup. shutil.move(backup_path, file_obj.file_path) # Now update the Version model, if we signed at least one file. if signed_at_least_a_file: version.update(version=bumped_version_number, version_int=version_int(bumped_version_number)) addon = version.addon if addon.pk not in addons_emailed: # Send a mail to the owners/devs warning them we've # automatically signed their addon. qs = (AddonUser.objects.filter( role=amo.AUTHOR_ROLE_OWNER, addon=addon).exclude(user__email__isnull=True)) emails = qs.values_list('user__email', flat=True) subject = mail_subject message = mail_message.format(addon=addon.name) amo.utils.send_mail( subject, message, recipient_list=emails, headers={'Reply-To': '*****@*****.**'}) addons_emailed.add(addon.pk)
def save(self, *args, **kw):
    if not self.version_int:
        self.version_int = compare.version_int(self.version)
    return super(AppVersion, self).save(*args, **kw)
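# Hedged example of what this override buys callers (field names assumed to
# match the AppVersion model used in make_langpack() above): version_int is
# filled in automatically when left unset.
appversion = AppVersion(application=amo.FIREFOX.id, version='58.0')
appversion.save()
assert appversion.version_int == compare.version_int('58.0')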
def file_supports_firefox(version):
    """Return a Q object: files supporting at least a firefox version."""
    return Q(version__apps__max__application__in=SIGN_FOR_APPS,
             version__apps__max__version_int__gte=version_int(version))
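# Hedged sketch of how this Q object gets combined, mirroring sign_addons()
# below: default-to-compatible files only need to reach MIN_D2C_VERSION,
# everything else needs MIN_NOT_D2C_VERSION.
is_default_compatible = Q(binary_components=False,
                          strict_compatibility=False)
ff_version_filter = (
    (is_default_compatible &
     file_supports_firefox(settings.MIN_D2C_VERSION)) |
    (~is_default_compatible &
     file_supports_firefox(settings.MIN_NOT_D2C_VERSION)))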
def sign_addons(addon_ids, force=False, **kw): """Used to sign all the versions of an addon. This is used in the 'sign_addons' and 'process_addons --task sign_addons' management commands. It also bumps the version number of the file and the Version, so the Firefox extension update mechanism picks this new signed version and installs it. """ log.info(u'[{0}] Signing addons.'.format(len(addon_ids))) reasons = { 'default': [MAIL_SUBJECT, MAIL_MESSAGE], 'expiry': [MAIL_EXPIRY_SUBJECT, MAIL_EXPIRY_MESSAGE] } mail_subject, mail_message = reasons[kw.get('reason', 'default')] def file_supports_firefox(version): """Return a Q object: files supporting at least a firefox version.""" return Q(version__apps__max__application__in=SIGN_FOR_APPS, version__apps__max__version_int__gte=version_int(version)) is_default_compatible = Q(binary_components=False, strict_compatibility=False) # We only want to sign files that are at least compatible with Firefox # MIN_D2C_VERSION, or Firefox MIN_NOT_D2C_VERSION if they are not default # to compatible. # The signing feature should be supported from Firefox 40 and above, but # we're still signing some files that are a bit older just in case. ff_version_filter = ( (is_default_compatible & file_supports_firefox(settings.MIN_D2C_VERSION)) | (~is_default_compatible & file_supports_firefox(settings.MIN_NOT_D2C_VERSION))) addons_emailed = set() # We only care about extensions. for version in Version.objects.filter(addon_id__in=addon_ids, addon__type=amo.ADDON_EXTENSION): # We only sign files that have been reviewed and are compatible with # versions of Firefox that are recent enough. to_sign = version.files.filter(ff_version_filter, status__in=amo.REVIEWED_STATUSES) if force: to_sign = to_sign.all() else: to_sign = to_sign.filter(is_signed=False) if not to_sign: log.info(u'Not signing addon {0}, version {1} (no files or already' u' signed)'.format(version.addon, version)) log.info(u'Signing addon {0}, version {1}'.format(version.addon, version)) bumped_version_number = get_new_version_number(version.version) signed_at_least_a_file = False # Did we sign at least one file? for file_obj in to_sign: if not os.path.isfile(file_obj.file_path): log.info(u'File {0} does not exist, skip'.format(file_obj.pk)) continue # Save the original file, before bumping the version. backup_path = u'{0}.backup_signature'.format(file_obj.file_path) shutil.copy(file_obj.file_path, backup_path) try: # Need to bump the version (modify manifest file) # before the file is signed. update_version_number(file_obj, bumped_version_number) server = settings.SIGNING_SERVER signed = bool(sign_file(file_obj, server)) if signed: # Bump the version number if at least one signed. signed_at_least_a_file = True else: # We didn't sign, so revert the version bump. shutil.move(backup_path, file_obj.file_path) except: log.error(u'Failed signing file {0}'.format(file_obj.pk), exc_info=True) # Revert the version bump, restore the backup. shutil.move(backup_path, file_obj.file_path) # Now update the Version model, if we signed at least one file. if signed_at_least_a_file: version.update(version=bumped_version_number, version_int=version_int(bumped_version_number)) addon = version.addon if addon.pk not in addons_emailed: # Send a mail to the owners/devs warning them we've # automatically signed their addon. 
qs = (AddonUser.objects .filter(role=amo.AUTHOR_ROLE_OWNER, addon=addon) .exclude(user__email__isnull=True)) emails = qs.values_list('user__email', flat=True) subject = mail_subject.format(addon=addon.name) message = mail_message.format( addon=addon.name, addon_url=amo.helpers.absolutify( addon.get_dev_url(action='versions'))) amo.utils.send_mail( subject, message, recipient_list=emails, fail_silently=True, headers={'Reply-To': '*****@*****.**'}) addons_emailed.add(addon.pk)
def annotate_legacy_addon_restrictions(results, is_new_upload): """ Annotate validation results to restrict uploads of legacy (non-webextension) add-ons if specific conditions are met. """ metadata = results.get('metadata', {}) target_apps = metadata.get('applications', {}) max_target_firefox_version = max( version_int(target_apps.get('firefox', {}).get('max', '')), version_int(target_apps.get('android', {}).get('max', ''))) is_webextension = metadata.get('is_webextension') is True is_extension_or_complete_theme = ( # Note: annoyingly, `detected_type` is at the root level, not under # `metadata`. results.get('detected_type') in ('theme', 'extension')) is_targeting_firefoxes_only = (set(target_apps.keys()).intersection( ('firefox', 'android')) == set(target_apps.keys())) is_targeting_thunderbird_or_seamonkey_only = (set( target_apps.keys()).intersection( ('thunderbird', 'seamonkey')) == set(target_apps.keys())) is_targeting_firefox_lower_than_53_only = ( metadata.get('strict_compatibility') is True and # version_int('') is actually 200100. If strict compatibility is true, # the validator should have complained about the non-existant max # version, but it doesn't hurt to check that the value is sane anyway. max_target_firefox_version > 200100 and max_target_firefox_version < 53000000000000) is_targeting_firefox_higher_or_equal_than_57 = ( max_target_firefox_version >= 57000000000000 and max_target_firefox_version < 99000000000000) # Thunderbird/Seamonkey only add-ons are moving to addons.thunderbird.net. if (waffle.switch_is_active('disallow-thunderbird-and-seamonkey') and is_targeting_thunderbird_or_seamonkey_only): msg = ugettext( u'Add-ons for Thunderbird and SeaMonkey are now listed and ' u'maintained on addons.thunderbird.net. You can use the same ' u'account to update your add-ons on the new site.') insert_validation_message(results, message=msg, msg_id='thunderbird_and_seamonkey_migration') # New legacy add-ons targeting Firefox only must target Firefox 53 or # lower, strictly. Extensions targeting multiple other apps are exempt from # this. elif (is_new_upload and is_extension_or_complete_theme and not is_webextension and is_targeting_firefoxes_only and not is_targeting_firefox_lower_than_53_only): msg = ugettext( u'Starting with Firefox 53, new add-ons on this site can ' u'only be WebExtensions.') insert_validation_message(results, message=msg, msg_id='legacy_addons_restricted') # All legacy add-ons (new or upgrades) targeting Firefox must target # Firefox 56.* or lower, even if they target multiple apps. elif (is_extension_or_complete_theme and not is_webextension and is_targeting_firefox_higher_or_equal_than_57): # Note: legacy add-ons targeting '*' (which is the default for sdk # add-ons) are excluded from this error, and instead are silently # rewritten as supporting '56.*' in the manifest parsing code. msg = ugettext( u'Legacy add-ons are not compatible with Firefox 57 or higher. ' u'Use a maxVersion of 56.* or lower.') insert_validation_message(results, message=msg, msg_id='legacy_addons_max_version') return results
def test_version_int_unicode():
    assert version_int(u'\u2322 ugh stephend') == 200100
from celery import chord, group from olympia import amo from olympia.addons.models import Addon from olympia.addons.tasks import ( add_dynamic_theme_tag, add_firefox57_tag, bump_appver_for_legacy_addons, disable_legacy_files, find_inconsistencies_between_es_and_db, migrate_legacy_dictionaries_to_webextension, migrate_lwts_to_static_themes, migrate_webextensions_to_git_storage, remove_amo_links_in_url_fields) from olympia.amo.utils import chunked from olympia.devhub.tasks import get_preview_sizes, recreate_previews from olympia.lib.crypto.tasks import sign_addons from olympia.reviewers.tasks import recalculate_post_review_weight from olympia.versions.compare import version_int firefox_56_star = version_int('56.*') tasks = { 'find_inconsistencies_between_es_and_db': { 'method': find_inconsistencies_between_es_and_db, 'qs': [] }, 'get_preview_sizes': { 'method': get_preview_sizes, 'qs': [] }, 'recalculate_post_review_weight': { 'method': recalculate_post_review_weight, 'qs': [Q(_current_version__autoapprovalsummary__verdict=amo.AUTO_APPROVED)] },
def test_version_asterix_compare():
    assert version_int('*') == version_int('99')
    assert version_int('98.*') < version_int('*')
    assert version_int('5.*') == version_int('5.99')
    assert version_int('5.*') > version_int('5.0.*')
def set_max_appversion(self, version):
    """Set self.max_appversion to the given version."""
    self.max_appversion.update(version=version,
                               version_int=version_int(version))
def test_version_int_compare():
    assert version_int('3.6.*') == version_int('3.6.99')
    assert version_int('3.6.*') > version_int('3.6.8')
def test_extract_version_and_files(self): permissions = ['bookmarks', 'random permission'] optional_permissions = ['cookies', 'optional permission'] version = self.addon.current_version # Make the version a webextension and add a bunch of things to it to # test different scenarios. version.all_files[0].update(is_webextension=True) file_factory(version=version, platform=PLATFORM_MAC.id, is_webextension=True) del version.all_files version.license = License.objects.create(name=u'My licensé', url='http://example.com/', builtin=0) [ WebextPermission.objects.create( file=file_, permissions=permissions, optional_permissions=optional_permissions) for file_ in version.all_files ] version.save() # Now we can run the extraction and start testing. extracted = self._extract() assert extracted['current_version'] assert extracted['current_version']['id'] == version.pk # Because strict_compatibility is False, the max version we record in # the index is an arbitrary super high version. assert extracted['current_version']['compatible_apps'] == { FIREFOX.id: { 'min': 2000000200100, 'max': version_int('*'), 'max_human': '4.0', 'min_human': '2.0', } } assert extracted['current_version']['license'] == { 'builtin': 0, 'id': version.license.pk, 'name_translations': [{ 'lang': u'en-US', 'string': u'My licensé' }], 'url': u'http://example.com/' } assert extracted['current_version']['release_notes_translations'] == [ { 'lang': 'en-US', 'string': u'Fix for an important bug' }, { 'lang': 'fr', 'string': u"Quelque chose en fran\xe7ais." u"\n\nQuelque chose d'autre." }, ] assert extracted['current_version']['reviewed'] == version.reviewed assert extracted['current_version']['version'] == version.version for index, file_ in enumerate(version.all_files): extracted_file = extracted['current_version']['files'][index] assert extracted_file['id'] == file_.pk assert extracted_file['created'] == file_.created assert extracted_file['filename'] == file_.filename assert extracted_file['hash'] == file_.hash assert extracted_file['is_webextension'] == file_.is_webextension assert extracted_file['is_restart_required'] == ( file_.is_restart_required) assert extracted_file['is_mozilla_signed_extension'] == ( file_.is_mozilla_signed_extension) assert extracted_file['platform'] == file_.platform assert extracted_file['size'] == file_.size assert extracted_file['status'] == file_.status assert (extracted_file['permissions'] == permissions) assert ( extracted_file['optional_permissions'] == optional_permissions) assert set(extracted['platforms']) == set( [PLATFORM_MAC.id, PLATFORM_ALL.id])
def test_latest_public_compatible_with(self): # Add compatible add-ons. We're going to request versions compatible # with 58.0. compatible_pack1 = addon_factory( name='Spanish Language Pack', type=amo.ADDON_LPAPP, target_locale='es', file_kw={'strict_compatibility': True}, version_kw={ 'min_app_version': '57.0', 'max_app_version': '57.*' }) compatible_pack1.current_version.update(created=self.days_ago(2)) compatible_version1 = version_factory( addon=compatible_pack1, file_kw={'strict_compatibility': True}, min_app_version='58.0', max_app_version='58.*') compatible_version1.update(created=self.days_ago(1)) compatible_pack2 = addon_factory( name='French Language Pack', type=amo.ADDON_LPAPP, target_locale='fr', file_kw={'strict_compatibility': True}, version_kw={ 'min_app_version': '58.0', 'max_app_version': '58.*' }) compatible_version2 = compatible_pack2.current_version compatible_version2.update(created=self.days_ago(2)) version_factory(addon=compatible_pack2, file_kw={'strict_compatibility': True}, min_app_version='59.0', max_app_version='59.*') # Add a more recent version for both add-ons, that would be compatible # with 58.0, but is not public/listed so should not be returned. version_factory(addon=compatible_pack1, file_kw={'strict_compatibility': True}, min_app_version='58.0', max_app_version='58.*', channel=amo.RELEASE_CHANNEL_UNLISTED) version_factory(addon=compatible_pack2, file_kw={ 'strict_compatibility': True, 'status': amo.STATUS_DISABLED }, min_app_version='58.0', max_app_version='58.*') # And for the first pack, add a couple of versions that are also # compatible. They are older so should appear after. extra_compatible_version_1 = version_factory( addon=compatible_pack1, file_kw={'strict_compatibility': True}, min_app_version='58.0', max_app_version='58.*') extra_compatible_version_1.update(created=self.days_ago(3)) extra_compatible_version_2 = version_factory( addon=compatible_pack1, file_kw={'strict_compatibility': True}, min_app_version='58.0', max_app_version='58.*') extra_compatible_version_2.update(created=self.days_ago(4)) # Add a few of incompatible add-ons. incompatible_pack1 = addon_factory( name='German Language Pack (incompatible with 58.0)', type=amo.ADDON_LPAPP, target_locale='fr', file_kw={'strict_compatibility': True}, version_kw={ 'min_app_version': '56.0', 'max_app_version': '56.*' }) version_factory(addon=incompatible_pack1, file_kw={'strict_compatibility': True}, min_app_version='59.0', max_app_version='59.*') addon_factory(name='Italian Language Pack (incompatible with 58.0)', type=amo.ADDON_LPAPP, target_locale='it', file_kw={'strict_compatibility': True}, version_kw={ 'min_app_version': '59.0', 'max_app_version': '59.*' }) addon_factory(name='Thunderbird Polish Language Pack', type=amo.ADDON_LPAPP, target_locale='pl', file_kw={'strict_compatibility': True}, version_kw={ 'application': amo.THUNDERBIRD.id, 'min_app_version': '58.0', 'max_app_version': '58.*' }) # Even add a pack with a compatible version... not public. And another # one with a compatible version... not listed. 
incompatible_pack2 = addon_factory( name='Japanese Language Pack (public, but 58.0 version is not)', type=amo.ADDON_LPAPP, target_locale='ja', file_kw={'strict_compatibility': True}, version_kw={ 'min_app_version': '57.0', 'max_app_version': '57.*' }) version_factory(addon=incompatible_pack2, min_app_version='58.0', max_app_version='58.*', file_kw={ 'status': amo.STATUS_AWAITING_REVIEW, 'strict_compatibility': True }) incompatible_pack3 = addon_factory( name='Nederlands Language Pack (58.0 version is unlisted)', type=amo.ADDON_LPAPP, target_locale='ja', file_kw={'strict_compatibility': True}, version_kw={ 'min_app_version': '57.0', 'max_app_version': '57.*' }) version_factory(addon=incompatible_pack3, min_app_version='58.0', max_app_version='58.*', channel=amo.RELEASE_CHANNEL_UNLISTED, file_kw={'strict_compatibility': True}) appversions = { 'min': version_int('58.0'), 'max': version_int('58.0a'), } qs = Version.objects.latest_public_compatible_with( amo.FIREFOX.id, appversions) expected_versions = [ compatible_version1, compatible_version2, extra_compatible_version_1, extra_compatible_version_2 ] assert list(qs) == expected_versions