예제 #1
0
class CollectionAddon(ModelBase):
    """Membership row tying an Addon to a Collection.

    Rows are unique per (addon, collection) pair; `ordering` controls the
    display position of the add-on inside the collection. The static
    post_save/post_delete handlers keep collection metadata and featured
    indexing up to date (presumably connected as Django signal receivers
    elsewhere — confirm against the signal wiring).
    """

    id = PositiveAutoField(primary_key=True)
    addon = models.ForeignKey(Addon)
    collection = models.ForeignKey(Collection)
    # category (deprecated: for "Fashion Your Firefox")
    # Optional free-form comment attached to this membership.
    comments = LinkifiedField(null=True)
    # The user who added the add-on to the collection, when known.
    user = models.ForeignKey(UserProfile, null=True)

    ordering = models.PositiveIntegerField(
        default=0,
        help_text='Add-ons are displayed in ascending order '
                  'based on this field.')

    class Meta(ModelBase.Meta):
        db_table = 'addons_collections'
        unique_together = (('addon', 'collection'),)

    @staticmethod
    def post_save(sender, instance, **kwargs):
        """Update Collection.addon_count and reindex add-on if the collection
        is featured."""
        # Imported lazily and delegated to a task so the recount work happens
        # asynchronously, outside this request.
        from . import tasks
        tasks.collection_meta.delay(instance.collection_id)

    @staticmethod
    def post_delete(sender, instance, **kwargs):
        """Refresh collection metadata on removal and, for featured
        collections, re-check the featured status of the removed add-on."""
        CollectionAddon.post_save(sender, instance, **kwargs)
        if instance.collection.is_featured():
            # The helpers .add_addon() and .remove_addon() already call .save()
            # on the collection, triggering update_featured_status() among
            # other things. However, this only takes care of the add-ons
            # present in the collection at the time, we also need to make sure
            # to invalidate add-ons that have been removed.
            Collection.update_featured_status(
                sender, instance.collection,
                addons=[instance.addon.pk], **kwargs)
예제 #2
0
class ApplicationsVersions(models.Model):
    """Compatibility range (min/max AppVersion) of a Version for one app.

    One row per (application, version) pair. `application` stores an amo
    application id as a plain integer rather than a foreign key.
    """

    id = PositiveAutoField(primary_key=True)
    application = models.PositiveIntegerField(choices=amo.APPS_CHOICES,
                                              db_column='application_id')
    version = models.ForeignKey(
        Version, related_name='apps', on_delete=models.CASCADE)
    min = models.ForeignKey(AppVersion, db_column='min',
                            related_name='min_set')
    max = models.ForeignKey(AppVersion, db_column='max',
                            related_name='max_set')

    class Meta:
        db_table = u'applications_versions'
        unique_together = (("application", "version"),)

    def get_application_display(self):
        """Return the human-readable name of this row's application."""
        return six.text_type(amo.APPS_ALL[self.application].pretty)

    def get_latest_application_version(self):
        """Return the most recent non-wildcard AppVersion for this
        application, or None if there is none."""
        return (
            AppVersion.objects
            .filter(
                # Exclude version strings containing a '*' wildcard.
                ~models.Q(version__contains='*'),
                application=self.application)
            .order_by('-version_int')
            .first())

    def __unicode__(self):
        # Compatible-by-default versions are displayed as an open-ended
        # range; otherwise show the explicit min - max range.
        if (self.version.is_compatible_by_default and
                self.version.is_compatible_app(amo.APP_IDS[self.application])):
            return ugettext(u'{app} {min} and later').format(
                app=self.get_application_display(),
                min=self.min
            )
        return u'%s %s - %s' % (self.get_application_display(),
                                self.min, self.max)
예제 #3
0
class IPNetworkUserRestriction(ModelBase):
    """Blocks add-on submissions originating from specific IP networks."""

    id = PositiveAutoField(primary_key=True)
    network = CIDRField(
        blank=True,
        null=True,
        # Fixed typo: the original said "IPv6 or IPv6"; both IPv4 and IPv6
        # ranges are accepted (the example given is IPv4).
        help_text=_(
            'Enter a valid IPv4 or IPv6 CIDR network range, eg. 127.0.0.1/28'))

    # Message shown to users whose requests are rejected by this restriction.
    error_message = _('Multiple add-ons violating our policies have been'
                      ' submitted from your location. The IP address has been'
                      ' blocked.')

    class Meta:
        db_table = 'users_user_network_restriction'

    def __str__(self):
        return str(self.network)

    @classmethod
    def allow_request(cls, request):
        """
        Return whether the specified request should be allowed to submit
        add-ons.

        Denies the request when REMOTE_ADDR is missing or not a valid IP
        address, or when the address falls inside any restricted network.
        """
        try:
            remote_addr = ipaddress.ip_address(request.META.get('REMOTE_ADDR'))
        except ValueError:
            # If we don't have a valid ip address, let's deny.
            return False

        # Any matching restriction blocks the request.
        return not any(
            remote_addr in restriction.network
            for restriction in cls.objects.all())
예제 #4
0
class AppVersion(ModelBase):
    """A known application version string, with a sortable integer form."""

    id = PositiveAutoField(primary_key=True)
    application = models.PositiveIntegerField(choices=APPS_CHOICES,
                                              db_column='application_id')
    version = models.CharField(max_length=255, default='')
    # Integer representation of `version`, used for ordering/comparisons.
    version_int = models.BigIntegerField(editable=False)

    class Meta:
        db_table = 'appversions'
        ordering = ['-version_int']
        unique_together = ('application', 'version')

    def save(self, *args, **kw):
        # Derive version_int lazily so callers only need to set `version`.
        if not self.version_int:
            self.version_int = compare.version_int(self.version)
        return super(AppVersion, self).save(*args, **kw)

    def __init__(self, *args, **kwargs):
        super(AppVersion, self).__init__(*args, **kwargs)
        # Add all the major, minor, ..., version attributes to the object.
        self.__dict__.update(compare.version_dict(self.version or ''))

    def __unicode__(self):
        return self.version
예제 #5
0
class License(ModelBase):
    """An add-on license, optionally backed by a built-in license constant."""

    # `builtin` value for licenses with no matching built-in constant.
    OTHER = 0

    id = PositiveAutoField(primary_key=True)
    name = TranslatedField()
    url = models.URLField(null=True)
    builtin = models.PositiveIntegerField(default=OTHER)
    text = LinkifiedField()
    on_form = models.BooleanField(
        default=False, help_text='Is this a license choice in the devhub?')

    objects = LicenseManager()

    class Meta:
        db_table = 'licenses'
        indexes = [models.Index(fields=('builtin', ), name='builtin_idx')]

    def __str__(self):
        # Prefer the built-in constant's name when one matches `builtin`.
        source = self._constant or self
        return str(source.name)

    @property
    def _constant(self):
        """The built-in license constant for `builtin`, or None."""
        return LICENSES_BY_BUILTIN.get(self.builtin)

    @property
    def creative_commons(self):
        """Whether this license is a Creative Commons one."""
        constant = self._constant
        return bool(constant and constant.creative_commons)

    @property
    def icons(self):
        """Icon identifiers from the built-in constant, or ''."""
        constant = self._constant
        if constant and constant.icons:
            return constant.icons
        return ''

    @property
    def slug(self):
        """Slug from the built-in constant, or None."""
        constant = self._constant
        if constant and constant.slug:
            return constant.slug
        return None
예제 #6
0
class File(OnChangeMixin, ModelBase):
    """A single uploaded file belonging to a Version of an add-on.

    Tracks the file on disk (name, size, hashes), its review status, and a
    collection of flags describing its contents (webextension, signed, etc.).
    """

    id = PositiveAutoField(primary_key=True)
    STATUS_CHOICES = amo.STATUS_CHOICES_FILE

    version = models.ForeignKey('versions.Version',
                                related_name='files',
                                on_delete=models.CASCADE)
    filename = models.CharField(max_length=255, default='')
    size = models.PositiveIntegerField(default=0)  # In bytes.
    hash = models.CharField(max_length=255, default='')
    # The original hash of the file, before we sign it, or repackage it in
    # any other way.
    original_hash = models.CharField(max_length=255, default='')
    status = models.PositiveSmallIntegerField(
        choices=STATUS_CHOICES.items(), default=amo.STATUS_AWAITING_REVIEW)
    datestatuschanged = models.DateTimeField(null=True, auto_now_add=True)
    is_restart_required = models.BooleanField(default=False)
    strict_compatibility = models.BooleanField(default=False)
    reviewed = models.DateTimeField(null=True, blank=True)
    # The `binary` field is used to store the flags from amo-validator when it
    # finds files with binary extensions or files that may contain binary
    # content.
    binary = models.BooleanField(default=False)
    # The `binary_components` field is used to store the flag from
    # amo-validator when it finds "binary-components" in the chrome manifest
    # file, used for default to compatible.
    binary_components = models.BooleanField(default=False)
    # Serial number of the certificate use for the signature.
    cert_serial_num = models.TextField(blank=True)
    # Is the file signed by Mozilla?
    is_signed = models.BooleanField(default=False)
    # Is the file an experiment (see bug 1220097)?
    is_experiment = models.BooleanField(default=False)
    # Is the file a WebExtension?
    is_webextension = models.BooleanField(default=False)
    # Is the file a special "Mozilla Signed Extension"
    # see https://wiki.mozilla.org/Add-ons/InternalSigning
    is_mozilla_signed_extension = models.BooleanField(default=False)
    # The user has disabled this file and this was its status.
    # STATUS_NULL means the user didn't disable the File - i.e. Mozilla did.
    original_status = models.PositiveSmallIntegerField(default=amo.STATUS_NULL)

    class Meta(ModelBase.Meta):
        db_table = 'files'
        indexes = [
            models.Index(fields=('created', 'version'), name='created_idx'),
            models.Index(fields=('binary_components', ),
                         name='files_cedd2560'),
            models.Index(fields=('datestatuschanged', 'version'),
                         name='statuschanged_idx'),
            models.Index(fields=('status', ), name='status'),
        ]

    def __str__(self):
        return str(self.id)

    @property
    def has_been_validated(self):
        """Whether a FileValidation record exists for this file."""
        try:
            self.validation
        except FileValidation.DoesNotExist:
            return False
        else:
            return True

    @property
    def automated_signing(self):
        """True if this file is eligible for automated signing. This
        currently means that its version is unlisted."""
        return self.version.channel == amo.RELEASE_CHANNEL_UNLISTED

    def get_file_cdn_url(self, attachment=False):
        """Return the URL for the file corresponding to this instance
        on the CDN."""
        if attachment:
            host = posixpath.join(user_media_url('addons'), '_attachments')
        else:
            host = user_media_url('addons')

        return posixpath.join(
            *map(force_bytes, [host, self.version.addon.id, self.filename]))

    def get_url_path(self, attachment=False):
        """Return the download URL path for this file."""
        return self._make_download_url('downloads.file', attachment=attachment)

    def _make_download_url(self, view_name, attachment=False):
        """Build the download URL for `view_name`, ending in the filename."""
        kwargs = {'file_id': self.pk}
        if attachment:
            kwargs['type'] = 'attachment'
        url = os.path.join(reverse(view_name, kwargs=kwargs), self.filename)
        return url

    @classmethod
    def from_upload(cls, upload, version, parsed_data=None):
        """
        Create a File instance from a FileUpload, a Version and the parsed_data
        generated by parse_addon().

        Note that it's the caller's responsability to ensure the file is valid.
        We can't check for that here because an admin may have overridden the
        validation results."""
        assert parsed_data is not None

        file_ = cls(version=version)
        upload_path = force_str(nfd_str(upload.path))
        ext = force_str(os.path.splitext(upload_path)[1])
        file_.filename = file_.generate_filename(extension=ext or '.xpi')
        # Size in bytes.
        file_.size = storage.size(upload_path)
        file_.is_restart_required = parsed_data.get('is_restart_required',
                                                    False)
        file_.strict_compatibility = parsed_data.get('strict_compatibility',
                                                     False)
        file_.is_experiment = parsed_data.get('is_experiment', False)
        file_.is_webextension = parsed_data.get('is_webextension', False)
        file_.is_mozilla_signed_extension = parsed_data.get(
            'is_mozilla_signed_extension', False)

        file_.hash = file_.generate_hash(upload_path)
        file_.original_hash = file_.hash
        file_.save()

        if file_.is_webextension:
            permissions = list(parsed_data.get('permissions', []))
            optional_permissions = list(
                parsed_data.get('optional_permissions', []))

            # devtools_page isn't in permissions block but treated as one
            # if a custom devtools page is added by an addon
            if 'devtools_page' in parsed_data:
                permissions.append('devtools')

            # Add content_scripts host matches too.
            for script in parsed_data.get('content_scripts', []):
                permissions.extend(script.get('matches', []))
            if permissions or optional_permissions:
                WebextPermission.objects.create(
                    permissions=permissions,
                    optional_permissions=optional_permissions,
                    file=file_,
                )

        log.info('New file: %r from %r' % (file_, upload))

        # Move the uploaded file from the temp location.
        copy_stored_file(upload_path, file_.current_file_path)

        if upload.validation:
            validation = json.loads(upload.validation)
            FileValidation.from_json(file_, validation)

        return file_

    def generate_hash(self, filename=None):
        """Generate a sha256 hash for `filename` (or the current file)."""
        with open(filename or self.current_file_path, 'rb') as fobj:
            return 'sha256:{}'.format(get_sha256(fobj))

    def generate_filename(self, extension=None):
        """
        Files are in the format of:
        {addon_name}-{version}-{apps}
        (-{platform} for some of the old ones from back when we had multiple
         platforms)
        """
        parts = []
        addon = self.version.addon
        # slugify drops unicode so we may end up with an empty string.
        # Apache did not like serving unicode filenames (bug 626587).
        extension = extension or '.xpi'
        name = slugify(addon.name).replace('-', '_') or 'addon'
        parts.append(name)
        parts.append(self.version.version)

        if addon.type not in amo.NO_COMPAT and self.version.compatible_apps:
            apps = '+'.join(
                sorted([a.shortername for a in self.version.compatible_apps]))
            parts.append(apps)

        self.filename = '-'.join(parts) + extension
        return self.filename

    _pretty_filename = re.compile(r'(?P<slug>[a-z0-7_]+)(?P<suffix>.*)')

    def pretty_filename(self, maxlen=20):
        """Displayable filename.

        Truncates filename so that the slug part fits maxlen.
        """
        m = self._pretty_filename.match(self.filename)
        if not m:
            return self.filename
        if len(m.group('slug')) < maxlen:
            return self.filename
        return '%s...%s' % (m.group('slug')[0:(maxlen - 3)], m.group('suffix'))

    def latest_xpi_url(self, attachment=False):
        """Return the 'latest version' download URL for this file's add-on."""
        addon = self.version.addon
        kw = {'addon_id': addon.slug}
        if attachment:
            kw['type'] = 'attachment'
        return os.path.join(
            reverse('downloads.latest', kwargs=kw),
            'addon-%s-latest%s' % (addon.pk, self.extension),
        )

    @property
    def file_path(self):
        """Path of the file in the public add-ons media directory."""
        return os.path.join(user_media_path('addons'),
                            str(self.version.addon_id), self.filename)

    @property
    def addon(self):
        return self.version.addon

    @property
    def guarded_file_path(self):
        """Path of the file in the guarded (disabled) media directory."""
        return os.path.join(user_media_path('guarded_addons'),
                            str(self.version.addon_id), self.filename)

    @property
    def current_file_path(self):
        """Returns the current path of the file, whether or not it is
        guarded."""

        file_disabled = self.status == amo.STATUS_DISABLED
        addon_disabled = self.addon.is_disabled
        if file_disabled or addon_disabled:
            return self.guarded_file_path
        else:
            return self.file_path

    @property
    def fallback_file_path(self):
        """Fallback path in case the file was disabled/re-enabled and not yet
        moved - sort of the opposite to current_file_path. This should only be
        used for things like code search or git extraction where we really want
        the file contents no matter what."""
        return (self.file_path if self.current_file_path
                == self.guarded_file_path else self.guarded_file_path)

    @property
    def extension(self):
        """The filename's extension, including the leading dot."""
        return os.path.splitext(self.filename)[-1]

    def move_file(self, source_path, destination_path, log_message):
        """Move a file from `source_path` to `destination_path` and delete the
        source directory if it's empty once the file has been successfully
        moved.

        Meant to move files from/to the guarded file path as they are disabled
        or re-enabled.

        IOError and UnicodeEncodeError are caught and logged."""
        log_message = force_str(log_message)
        try:
            if storage.exists(source_path):
                source_parent_path = os.path.dirname(source_path)
                log.info(
                    log_message.format(source=source_path,
                                       destination=destination_path))
                move_stored_file(source_path, destination_path)
                # Now that the file has been deleted, remove the directory if
                # it exists to prevent the main directory from growing too
                # much (#11464)
                remaining_dirs, remaining_files = storage.listdir(
                    source_parent_path)
                if len(remaining_dirs) == len(remaining_files) == 0:
                    storage.delete(source_parent_path)
        except (UnicodeEncodeError, IOError):
            msg = 'Move Failure: {} {}'.format(source_path, destination_path)
            log.exception(msg)

    def hide_disabled_file(self):
        """Move a file from the public path to the guarded file path."""
        if not self.filename:
            return
        src, dst = self.file_path, self.guarded_file_path
        self.move_file(src, dst,
                       'Moving disabled file: {source} => {destination}')

    def unhide_disabled_file(self):
        """Move a file from guarded file path to the public file path."""
        if not self.filename:
            return
        src, dst = self.guarded_file_path, self.file_path
        self.move_file(src, dst,
                       'Moving undisabled file: {source} => {destination}')

    @staticmethod
    def _filter_unique_strings(values):
        """Return only the string entries of `values`, deduplicated while
        preserving their original order.

        Shared by `permissions` and `optional_permissions`, which previously
        duplicated a hard-to-read set-rebinding comprehension for this.
        """
        seen = set()
        result = []
        for value in values:
            if isinstance(value, str) and value not in seen:
                seen.add(value)
                result.append(value)
        return result

    @cached_property
    def permissions(self):
        if not self.is_webextension:
            return []
        try:
            # Filter out any errant non-strings included in the manifest JSON.
            # Remove any duplicate permissions.
            return self._filter_unique_strings(
                self._webext_permissions.permissions)
        except WebextPermission.DoesNotExist:
            return []

    @cached_property
    def optional_permissions(self):
        if not self.is_webextension:
            return []
        try:
            # Filter out any errant non-strings included in the manifest JSON.
            # Remove any duplicate optional permissions.
            return self._filter_unique_strings(
                self._webext_permissions.optional_permissions)
        except WebextPermission.DoesNotExist:
            return []
예제 #7
0
class File(OnChangeMixin, ModelBase):
    """A single uploaded file belonging to a Version of an add-on.

    Python 2-era variant of the model (uses `__unicode__`, `unicode`,
    `basestring`): includes per-platform files and Jetpack/SDK metadata.
    """

    id = PositiveAutoField(primary_key=True)
    STATUS_CHOICES = amo.STATUS_CHOICES_FILE

    version = models.ForeignKey('versions.Version',
                                related_name='files',
                                on_delete=models.CASCADE)
    platform = models.PositiveIntegerField(
        choices=amo.SUPPORTED_PLATFORMS_CHOICES,
        default=amo.PLATFORM_ALL.id,
        db_column="platform_id")
    filename = models.CharField(max_length=255, default='')
    size = models.PositiveIntegerField(default=0)  # In bytes.
    hash = models.CharField(max_length=255, default='')
    # The original hash of the file, before we sign it, or repackage it in
    # any other way.
    original_hash = models.CharField(max_length=255, default='')
    jetpack_version = models.CharField(max_length=10, null=True, blank=True)
    status = models.PositiveSmallIntegerField(
        choices=STATUS_CHOICES.items(), default=amo.STATUS_AWAITING_REVIEW)
    datestatuschanged = models.DateTimeField(null=True, auto_now_add=True)
    is_restart_required = models.BooleanField(default=False)
    strict_compatibility = models.BooleanField(default=False)
    # The XPI contains JS that calls require("chrome").
    requires_chrome = models.BooleanField(default=False)
    reviewed = models.DateTimeField(null=True, blank=True)
    # The `binary` field is used to store the flags from amo-validator when it
    # finds files with binary extensions or files that may contain binary
    # content.
    binary = models.BooleanField(default=False)
    # The `binary_components` field is used to store the flag from
    # amo-validator when it finds "binary-components" in the chrome manifest
    # file, used for default to compatible.
    binary_components = models.BooleanField(default=False, db_index=True)
    # Serial number of the certificate use for the signature.
    cert_serial_num = models.TextField(blank=True)
    # Is the file signed by Mozilla?
    is_signed = models.BooleanField(default=False)
    # Is the file a multi-package?
    #     https://developer.mozilla.org/en-US/docs/Multiple_Item_Packaging
    is_multi_package = models.BooleanField(default=False)
    # Is the file an experiment (see bug 1220097)?
    is_experiment = models.BooleanField(default=False)
    # Is the file a WebExtension?
    is_webextension = models.BooleanField(default=False)
    # Is the file a special "Mozilla Signed Extension"
    # see https://wiki.mozilla.org/Add-ons/InternalSigning
    is_mozilla_signed_extension = models.BooleanField(default=False)
    # The user has disabled this file and this was its status.
    # STATUS_NULL means the user didn't disable the File - i.e. Mozilla did.
    original_status = models.PositiveSmallIntegerField(default=amo.STATUS_NULL)

    class Meta(ModelBase.Meta):
        db_table = 'files'

    def __unicode__(self):
        return unicode(self.id)

    def get_platform_display(self):
        """Return the human-readable name of this file's platform."""
        return force_text(amo.PLATFORMS[self.platform].name)

    @property
    def has_been_validated(self):
        """Whether a FileValidation record exists for this file."""
        try:
            self.validation
        except FileValidation.DoesNotExist:
            return False
        else:
            return True

    @property
    def automated_signing(self):
        """True if this file is eligible for automated signing. This currently
        means that either its version is unlisted."""
        return self.version.channel == amo.RELEASE_CHANNEL_UNLISTED

    def get_file_cdn_url(self, attachment=False):
        """Return the URL for the file corresponding to this instance
        on the CDN."""
        if attachment:
            host = posixpath.join(user_media_url('addons'), '_attachments')
        else:
            host = user_media_url('addons')

        return posixpath.join(
            *map(force_bytes, [host, self.version.addon.id, self.filename]))

    def get_url_path(self, src, attachment=False):
        """Return the absolute download URL, tagged with `src` for tracking."""
        return self._make_download_url('downloads.file',
                                       src,
                                       attachment=attachment)

    def _make_download_url(self, view_name, src, attachment=False):
        """Build the absolute download URL for `view_name` with a src param."""
        kwargs = {'file_id': self.pk}
        if attachment:
            kwargs['type'] = 'attachment'
        url = os.path.join(reverse(view_name, kwargs=kwargs), self.filename)
        return absolutify(urlparams(url, src=src))

    @classmethod
    def from_upload(cls, upload, version, platform, parsed_data=None):
        """
        Create a File instance from a FileUpload, a Version, a platform id
        and the parsed_data generated by parse_addon().

        Note that it's the caller's responsability to ensure the file is valid.
        We can't check for that here because an admin may have overridden the
        validation results."""
        assert parsed_data is not None

        file_ = cls(version=version, platform=platform)
        upload.path = force_bytes(nfd_str(upload.path))
        ext = os.path.splitext(upload.path)[1]
        # Legacy .jar packages are treated as .xpi.
        if ext == '.jar':
            ext = '.xpi'
        file_.filename = file_.generate_filename(extension=ext or '.xpi')
        # Size in bytes.
        file_.size = storage.size(upload.path)
        data = cls.get_jetpack_metadata(upload.path)
        if 'sdkVersion' in data and data['sdkVersion']:
            # Truncated to fit the 10-char jetpack_version column.
            file_.jetpack_version = data['sdkVersion'][:10]
        file_.is_restart_required = parsed_data.get('is_restart_required',
                                                    False)
        file_.strict_compatibility = parsed_data.get('strict_compatibility',
                                                     False)
        file_.is_multi_package = parsed_data.get('is_multi_package', False)
        file_.is_experiment = parsed_data.get('is_experiment', False)
        file_.is_webextension = parsed_data.get('is_webextension', False)
        file_.is_mozilla_signed_extension = parsed_data.get(
            'is_mozilla_signed_extension', False)

        file_.hash = file_.generate_hash(upload.path)
        file_.original_hash = file_.hash

        if upload.validation:
            validation = json.loads(upload.validation)
            if validation['metadata'].get('requires_chrome'):
                file_.requires_chrome = True

        file_.save()
        if file_.is_webextension:
            permissions = list(parsed_data.get('permissions', []))
            # Add content_scripts host matches too.
            for script in parsed_data.get('content_scripts', []):
                permissions.extend(script.get('matches', []))
            if permissions:
                WebextPermission.objects.create(permissions=permissions,
                                                file=file_)

        log.debug('New file: %r from %r' % (file_, upload))

        # Move the uploaded file from the temp location.
        copy_stored_file(upload.path, file_.current_file_path)

        if upload.validation:
            # `validation` was parsed above, under the same condition.
            FileValidation.from_json(file_, validation)

        return file_

    @classmethod
    def get_jetpack_metadata(cls, path):
        """Extract the Jetpack/SDK version info from the XPI at `path`.

        Returns a dict with an 'sdkVersion' key (None when not a Jetpack)."""
        data = {'sdkVersion': None}
        try:
            zip_ = zipfile.ZipFile(path)
        except (zipfile.BadZipfile, IOError):
            # This path is not an XPI. It's probably an app manifest.
            return data
        if 'package.json' in zip_.namelist():
            # jpm-built add-ons have a package.json but no embedded version.
            data['sdkVersion'] = "jpm"
        else:
            name = 'harness-options.json'
            if name in zip_.namelist():
                try:
                    opts = json.load(zip_.open(name))
                except ValueError as exc:
                    log.info('Could not parse harness-options.json in %r: %s' %
                             (path, exc))
                else:
                    data['sdkVersion'] = opts.get('sdkVersion')
        return data

    def generate_hash(self, filename=None):
        """Generate a hash for a file."""
        hash = hashlib.sha256()
        with open(filename or self.current_file_path, 'rb') as obj:
            # NOTE: the '' sentinel relies on Python 2, where read() on a
            # binary file returns str; under Python 3 it would never match.
            for chunk in iter(lambda: obj.read(1024), ''):
                hash.update(chunk)
        return 'sha256:%s' % hash.hexdigest()

    def generate_filename(self, extension=None):
        """
        Files are in the format of:
        {addon_name}-{version}-{apps}-{platform}
        """
        parts = []
        addon = self.version.addon
        # slugify drops unicode so we may end up with an empty string.
        # Apache did not like serving unicode filenames (bug 626587).
        extension = extension or '.xpi'
        name = slugify(addon.name).replace('-', '_') or 'addon'
        parts.append(name)
        parts.append(self.version.version)

        if addon.type not in amo.NO_COMPAT and self.version.compatible_apps:
            apps = '+'.join(
                sorted([a.shortername for a in self.version.compatible_apps]))
            parts.append(apps)

        # PLATFORM_ALL is the default and is omitted from the name.
        if self.platform and self.platform != amo.PLATFORM_ALL.id:
            parts.append(amo.PLATFORMS[self.platform].shortname)

        self.filename = '-'.join(parts) + extension
        return self.filename

    _pretty_filename = re.compile(r'(?P<slug>[a-z0-7_]+)(?P<suffix>.*)')

    def pretty_filename(self, maxlen=20):
        """Displayable filename.

        Truncates filename so that the slug part fits maxlen.
        """
        m = self._pretty_filename.match(self.filename)
        if not m:
            return self.filename
        if len(m.group('slug')) < maxlen:
            return self.filename
        return u'%s...%s' % (m.group('slug')[0:(maxlen - 3)],
                             m.group('suffix'))

    def latest_xpi_url(self, attachment=False):
        """Return the 'latest version' download URL for this file's add-on."""
        addon = self.version.addon
        kw = {'addon_id': addon.slug}
        if self.platform != amo.PLATFORM_ALL.id:
            kw['platform'] = self.platform
        if attachment:
            kw['type'] = 'attachment'
        return os.path.join(reverse('downloads.latest', kwargs=kw),
                            'addon-%s-latest%s' % (addon.pk, self.extension))

    def eula_url(self):
        """Return the URL of the EULA page for this file."""
        return reverse('addons.eula', args=[self.version.addon_id, self.id])

    @property
    def file_path(self):
        """Path of the file in the public add-ons media directory."""
        return os.path.join(user_media_path('addons'),
                            str(self.version.addon_id), self.filename)

    @property
    def addon(self):
        return self.version.addon

    @property
    def guarded_file_path(self):
        """Path of the file in the guarded (disabled) media directory."""
        return os.path.join(user_media_path('guarded_addons'),
                            str(self.version.addon_id), self.filename)

    @property
    def current_file_path(self):
        """Returns the current path of the file, whether or not it is
        guarded."""

        file_disabled = self.status == amo.STATUS_DISABLED
        addon_disabled = self.addon.is_disabled
        if file_disabled or addon_disabled:
            return self.guarded_file_path
        else:
            return self.file_path

    @property
    def extension(self):
        """The filename's extension, including the leading dot."""
        return os.path.splitext(self.filename)[-1]

    def move_file(self, source, destination, log_message):
        """Move a file from `source` to `destination`."""
        # Make sure we are passing bytes to Python's io system.
        source, destination = force_bytes(source), force_bytes(destination)

        try:
            if storage.exists(source):
                log.info(
                    log_message.format(source=source, destination=destination))
                move_stored_file(source, destination)
        except (UnicodeEncodeError, IOError):
            msg = 'Move Failure: {} {}'.format(force_bytes(source),
                                               force_bytes(destination))
            log.exception(msg)

    def hide_disabled_file(self):
        """Move a disabled file to the guarded file path."""
        if not self.filename:
            return
        src, dst = self.file_path, self.guarded_file_path
        self.move_file(src, dst,
                       'Moving disabled file: {source} => {destination}')

    def unhide_disabled_file(self):
        """Move a re-enabled file back to the public file path."""
        if not self.filename:
            return
        src, dst = self.guarded_file_path, self.file_path
        self.move_file(src, dst,
                       'Moving undisabled file: {source} => {destination}')

    _get_localepicker = re.compile('^locale browser ([\w\-_]+) (.*)$', re.M)

    @memoize(prefix='localepicker', timeout=None)
    def get_localepicker(self):
        """
        For a file that is part of a language pack, extract
        the chrome/localepicker.properties file and return as
        a string.
        """
        start = time.time()

        try:
            zip = SafeZip(self.file_path)
        except (zipfile.BadZipfile, IOError):
            return ''

        try:
            manifest = zip.read('chrome.manifest')
        except KeyError as e:
            log.info('No file named: chrome.manifest in file: %s' % self.pk)
            return ''

        res = self._get_localepicker.search(manifest)
        if not res:
            log.error('Locale browser not in chrome.manifest: %s' % self.pk)
            return ''

        try:
            p = res.groups()[1]
            if 'localepicker.properties' not in p:
                p = os.path.join(p, 'localepicker.properties')
            res = zip.extract_from_manifest(p)
        except (zipfile.BadZipfile, IOError) as e:
            log.error('Error unzipping: %s, %s in file: %s' % (p, e, self.pk))
            return ''
        except (ValueError, KeyError) as e:
            log.error('No file named: %s in file: %s' % (e, self.pk))
            return ''

        end = time.time() - start
        log.info('Extracted localepicker file: %s in %.2fs' % (self.pk, end))
        statsd.timing('files.extract.localepicker', (end * 1000))
        return res

    @property
    def webext_permissions(self):
        """Return permissions that should be displayed, with descriptions, in
        defined order:
        1) Either the match all permission, if present (e.g. <all-urls>), or
           match urls for sites (<all-urls> takes preference over match urls)
        2) nativeMessaging permission, if present
        3) other known permissions in alphabetical order
        """
        knowns = list(
            WebextPermissionDescription.objects.filter(
                name__in=self.webext_permissions_list))

        urls = []
        match_url = None
        for name in self.webext_permissions_list:
            if re.match(WebextPermissionDescription.MATCH_ALL_REGEX, name):
                match_url = WebextPermissionDescription.ALL_URLS_PERMISSION
            elif name == WebextPermission.NATIVE_MESSAGING_NAME:
                # Move nativeMessaging to front of the list
                for index, perm in enumerate(knowns):
                    if perm.name == WebextPermission.NATIVE_MESSAGING_NAME:
                        knowns.pop(index)
                        knowns.insert(0, perm)
                        break
            elif '//' in name:
                # Filter out match urls so we can group them.
                urls.append(name)
            # Other strings are unknown permissions we don't care about

        if match_url is None and len(urls) == 1:
            match_url = Permission(
                u'single-match',
                ugettext(u'Access your data for {name}').format(name=urls[0]))
        elif match_url is None and len(urls) > 1:
            details = (u'<details><summary>{copy}</summary><ul>{sites}</ul>'
                       u'</details>')
            copy = ugettext(u'Access your data on the following websites:')
            sites = ''.join(
                [u'<li>%s</li>' % jinja2_escape(name) for name in urls])
            match_url = Permission(
                u'multiple-match',
                mark_safe(details.format(copy=copy, sites=sites)))

        return ([match_url] if match_url else []) + knowns

    @cached_property
    def webext_permissions_list(self):
        """String permissions from the manifest, deduplicated, in order."""
        if not self.is_webextension:
            return []
        try:
            # Filter out any errant non-strings included in the manifest JSON.
            # Remove any duplicate permissions.
            permissions = set()
            permissions = [
                p for p in self._webext_permissions.permissions
                if isinstance(p, basestring)
                and not (p in permissions or permissions.add(p))
            ]
            return permissions

        except WebextPermission.DoesNotExist:
            return []
예제 #8
0
class Version(OnChangeMixin, ModelBase):
    """A single version of an add-on, together with its File(s), its
    compatibility information (ApplicationsVersions) and review metadata.

    Versions are soft-deleted by default (see delete()) so the version
    number and files remain available for comparison afterwards.
    """
    id = PositiveAutoField(primary_key=True)
    addon = models.ForeignKey('addons.Addon',
                              related_name='versions',
                              on_delete=models.CASCADE)
    license = models.ForeignKey('License', null=True, on_delete=models.CASCADE)
    release_notes = PurifiedField(db_column='releasenotes', short=False)
    approval_notes = models.TextField(db_column='approvalnotes',
                                      default='',
                                      null=True,
                                      blank=True)
    version = models.CharField(max_length=255, default='0.1')
    # Sortable integer representation of `version`; filled in by save().
    version_int = models.BigIntegerField(null=True, editable=False)

    nomination = models.DateTimeField(null=True)
    reviewed = models.DateTimeField(null=True)

    # Soft-delete flag, set by delete() unless hard=True.
    deleted = models.BooleanField(default=False)

    source = models.FileField(upload_to=source_upload_path,
                              null=True,
                              blank=True)

    channel = models.IntegerField(choices=amo.RELEASE_CHANNEL_CHOICES,
                                  default=amo.RELEASE_CHANNEL_LISTED)

    git_hash = models.CharField(max_length=40, blank=True)
    source_git_hash = models.CharField(max_length=40, blank=True)

    recommendation_approved = models.BooleanField(null=False, default=False)

    # The order of those managers is very important: please read the lengthy
    # comment above the Addon managers declaration/instantiation.
    unfiltered = VersionManager(include_deleted=True)
    objects = VersionManager()

    class Meta(ModelBase.Meta):
        db_table = 'versions'
        # This is very important: please read the lengthy comment in Addon.Meta
        # description
        base_manager_name = 'unfiltered'
        ordering = ['-created', '-modified']

    def __init__(self, *args, **kwargs):
        """Populate the instance with the parsed parts of its version string
        (via version_dict) so they are available as attributes."""
        super(Version, self).__init__(*args, **kwargs)
        self.__dict__.update(version_dict(self.version or ''))

    def __str__(self):
        # Returns the (escaped) version string, safe for direct use in
        # templates.
        return jinja2.escape(self.version)

    def save(self, *args, **kw):
        """Save the version, computing version_int from the version string
        if it is not set yet. Returns self."""
        if not self.version_int and self.version:
            v_int = version_int(self.version)
            # Magic number warning, this is the maximum size
            # of a big int in MySQL to prevent version_int overflow, for
            # people who have rather crazy version numbers.
            # http://dev.mysql.com/doc/refman/5.5/en/numeric-types.html
            if v_int < 9223372036854775807:
                self.version_int = v_int
            else:
                # Overflowing values are simply not stored; the version still
                # saves, it just can't be sorted numerically.
                log.error('No version_int written for version %s, %s' %
                          (self.pk, self.version))
        super(Version, self).save(*args, **kw)
        return self

    @classmethod
    def from_upload(cls,
                    upload,
                    addon,
                    selected_apps,
                    channel,
                    parsed_data=None):
        """
        Create a Version instance and corresponding File(s) from a
        FileUpload, an Addon, a list of compatible app ids, a channel id and
        the parsed_data generated by parse_addon().

        Note that it's the caller's responsability to ensure the file is valid.
        We can't check for that here because an admin may have overridden the
        validation results.
        """
        assert parsed_data is not None

        if addon.status == amo.STATUS_DISABLED:
            raise VersionCreateError(
                'Addon is Mozilla Disabled; no new versions are allowed.')

        # Listed versions inherit the license of the previous listed version,
        # if there was one.
        license_id = None
        if channel == amo.RELEASE_CHANNEL_LISTED:
            previous_version = addon.find_latest_version(channel=channel,
                                                         exclude=())
            if previous_version and previous_version.license_id:
                license_id = previous_version.license_id
        approval_notes = None
        if parsed_data.get('is_mozilla_signed_extension'):
            approval_notes = (u'This version has been signed with '
                              u'Mozilla internal certificate.')
        version = cls.objects.create(
            addon=addon,
            approval_notes=approval_notes,
            version=parsed_data['version'],
            license_id=license_id,
            channel=channel,
        )
        log.info('New version: %r (%s) from %r' %
                 (version, version.id, upload))
        activity.log_create(amo.LOG.ADD_VERSION, version, addon)

        if addon.type == amo.ADDON_STATICTHEME:
            # We don't let developers select apps for static themes
            selected_apps = [app.id for app in amo.APP_USAGE]

        compatible_apps = {}
        for app in parsed_data.get('apps', []):
            if app.id not in selected_apps:
                # If the user chose to explicitly deselect Firefox for Android
                # we're not creating the respective `ApplicationsVersions`
                # which will have this add-on then be listed only for
                # Firefox specifically.
                continue

            compatible_apps[app.appdata] = ApplicationsVersions(
                version=version, min=app.min, max=app.max, application=app.id)
            compatible_apps[app.appdata].save()

        # See #2828: sometimes when we generate the filename(s) below, in
        # File.from_upload(), cache-machine is confused and has trouble
        # fetching the ApplicationsVersions that were just created. To work
        # around this we pre-generate version.compatible_apps and avoid the
        # queries completely.
        version._compatible_apps = compatible_apps

        # For backwards compatibility. We removed specific platform
        # support during submission but we don't handle it any different
        # beyond that yet. That means, we're going to simply set it
        # to `PLATFORM_ALL` and also have the backend create separate
        # files for each platform. Cleaning that up is another step.
        # Given the timing on this, we don't care about updates to legacy
        # add-ons as well.
        # Create relevant file and update the all_files cached property on the
        # Version, because we might need it afterwards.
        version.all_files = [
            File.from_upload(upload=upload,
                             version=version,
                             platform=amo.PLATFORM_ALL.id,
                             parsed_data=parsed_data)
        ]

        version.inherit_nomination(from_statuses=[amo.STATUS_AWAITING_REVIEW])
        version.disable_old_files()
        # After the upload has been copied to all platforms, remove the upload.
        storage.delete(upload.path)
        version_uploaded.send(sender=version)

        # Extract this version into git repository
        transaction.on_commit(
            lambda: extract_version_to_git_repository(version, upload))

        # Generate a preview and icon for listed static themes
        if (addon.type == amo.ADDON_STATICTHEME
                and channel == amo.RELEASE_CHANNEL_LISTED):
            theme_data = parsed_data.get('theme', {})
            generate_static_theme_preview(theme_data, version.pk)

        # Track the time it took from first upload through validation
        # (and whatever else) until a version was created.
        upload_start = utc_millesecs_from_epoch(upload.created)
        now = datetime.datetime.now()
        now_ts = utc_millesecs_from_epoch(now)
        upload_time = now_ts - upload_start

        log.info('Time for version {version} creation from upload: {delta}; '
                 'created={created}; now={now}'.format(delta=upload_time,
                                                       version=version,
                                                       created=upload.created,
                                                       now=now))
        statsd.timing('devhub.version_created_from_upload', upload_time)

        return version

    def license_url(self, impala=False):
        """Return the URL to this version's license page.

        NOTE(review): the `impala` argument is accepted but unused here —
        presumably kept for caller compatibility; confirm before removing.
        """
        return reverse('addons.license', args=[self.addon.slug, self.version])

    def get_url_path(self):
        """Return the add-on's versions page URL, or '' if unlisted."""
        if self.channel == amo.RELEASE_CHANNEL_UNLISTED:
            return ''
        return reverse('addons.versions', args=[self.addon.slug])

    def delete(self, hard=False):
        """Delete this version.

        Soft-deletes by default (disables the files and flags the version as
        deleted, queueing deletion of its preview files); pass hard=True for
        an actual database delete.
        """
        # To avoid a circular import
        from .tasks import delete_preview_files

        log.info(u'Version deleted: %r (%s)' % (self, self.id))
        activity.log_create(amo.LOG.DELETE_VERSION, self.addon,
                            str(self.version))

        if hard:
            super(Version, self).delete()
        else:
            # By default we soft delete so we can keep the files for comparison
            # and a record of the version number.
            self.files.update(status=amo.STATUS_DISABLED)
            self.deleted = True
            self.save()

            previews_pks = list(
                VersionPreview.objects.filter(version__id=self.id).values_list(
                    'id', flat=True))

            for preview_pk in previews_pks:
                delete_preview_files.delay(preview_pk)

    @property
    def is_user_disabled(self):
        # A version counts as user-disabled when at least one of its files is
        # disabled while still remembering a real original status (i.e. the
        # disabling happened via the setter below, not by Mozilla).
        return self.files.filter(status=amo.STATUS_DISABLED).exclude(
            original_status=amo.STATUS_NULL).exists()

    @is_user_disabled.setter
    def is_user_disabled(self, disable):
        # User wants to disable (and the File isn't already).
        if disable:
            activity.log_create(amo.LOG.DISABLE_VERSION, self.addon, self)
            for file in self.files.exclude(status=amo.STATUS_DISABLED).all():
                file.update(original_status=file.status,
                            status=amo.STATUS_DISABLED)
        # User wants to re-enable (and user did the disable, not Mozilla).
        else:
            activity.log_create(amo.LOG.ENABLE_VERSION, self.addon, self)
            for file in self.files.exclude(
                    original_status=amo.STATUS_NULL).all():
                file.update(status=file.original_status,
                            original_status=amo.STATUS_NULL)

    @cached_property
    def all_activity(self):
        """All VersionLog entries for this version, oldest first. Cached."""
        from olympia.activity.models import VersionLog  # yucky
        al = (VersionLog.objects.filter(
            version=self.id).order_by('created').select_related(
                'activity_log', 'version'))
        return al

    @property
    def compatible_apps(self):
        """Mapping of {APP: ApplicationsVersions or None} for this version."""
        # Dicts, search providers and personas don't have compatibility info.
        # Fake one for them.
        if self.addon and self.addon.type in amo.NO_COMPAT:
            return {app: None for app in amo.APP_TYPE_SUPPORT[self.addon.type]}
        # Otherwise, return _compatible_apps which is a cached property that
        # is filled by the transformer, or simply calculated from the related
        # compat instances.
        return self._compatible_apps

    @cached_property
    def _compatible_apps(self):
        """Get a mapping of {APP: ApplicationsVersions}."""
        avs = self.apps.select_related('version')
        return self._compat_map(avs)

    @cached_property
    def compatible_apps_ordered(self):
        """compatible_apps as (app, av) pairs sorted by app short name."""
        apps = self.compatible_apps.items()
        return sorted(apps, key=lambda v: v[0].short)

    def compatible_platforms(self):
        """Returns a dict of compatible file platforms for this version.

        The result is based on which app(s) the version targets.
        """
        app_ids = [a.application for a in self.apps.all()]
        targets_mobile = amo.ANDROID.id in app_ids
        targets_other = any((id_ != amo.ANDROID.id) for id_ in app_ids)
        all_plats = {}
        if targets_other:
            all_plats.update(amo.DESKTOP_PLATFORMS)
        if targets_mobile:
            all_plats.update(amo.MOBILE_PLATFORMS)
        return all_plats

    @cached_property
    def is_compatible_by_default(self):
        """Returns whether or not the add-on is considered compatible by
        default."""
        # Use self.all_files directly since that's cached and more potentially
        # prefetched through a transformer already
        return not any([
            file for file in self.all_files
            if file.binary_components or file.strict_compatibility
        ])

    def is_compatible_app(self, app):
        """Returns True if the provided app passes compatibility conditions."""
        if self.addon.type in amo.NO_COMPAT:
            return True
        appversion = self.compatible_apps.get(app)
        if appversion and app.id in amo.D2C_MIN_VERSIONS:
            return (version_int(appversion.max.version) >= version_int(
                amo.D2C_MIN_VERSIONS.get(app.id, '*')))
        return False

    def compat_override_app_versions(self):
        """Returns the incompatible app versions range(s).

        If not ranges, returns empty list.  Otherwise, this will return all
        the app version ranges that this particular version is incompatible
        with.
        """
        overrides = list(self.addon.compatoverride_set.all())

        if not overrides:
            return []

        app_versions = []
        for co in overrides:
            for range in co.collapsed_ranges():
                # Only ranges whose [min, max] interval contains this
                # version's number apply.
                if (version_int(range.min) <= version_int(self.version) <=
                        version_int(range.max)):
                    app_versions.extend([(a.min, a.max) for a in range.apps])
        return app_versions

    @cached_property
    def all_files(self):
        """Shortcut for list(self.files.all()). Cached."""
        return list(self.files.all())

    @property
    def current_file(self):
        """Shortcut for selecting the first file from self.all_files"""
        return self.all_files[0]

    @cached_property
    def supported_platforms(self):
        """Get a list of supported platform names."""
        return list(set(amo.PLATFORMS[f.platform] for f in self.all_files))

    @property
    def status(self):
        """Human-readable status labels for each attached file."""
        return [
            f.STATUS_CHOICES.get(f.status,
                                 ugettext('[status:%s]') % f.status)
            for f in self.all_files
        ]

    @property
    def statuses(self):
        """Unadulterated statuses, good for an API."""
        return [(f.id, f.status) for f in self.all_files]

    def is_public(self):
        # To be public, a version must not be deleted, must belong to a public
        # addon, and all its attached files must have public status.
        try:
            return (not self.deleted and self.addon.is_public()
                    and all(f.status == amo.STATUS_APPROVED
                            for f in self.all_files))
        except ObjectDoesNotExist:
            return False

    @property
    def is_restart_required(self):
        # True if any attached file requires a browser restart.
        return any(file_.is_restart_required for file_ in self.all_files)

    @property
    def is_webextension(self):
        # True if any attached file is a WebExtension.
        return any(file_.is_webextension for file_ in self.all_files)

    @property
    def is_mozilla_signed(self):
        """Is the file a special "Mozilla Signed Extension"

        See https://wiki.mozilla.org/Add-ons/InternalSigning for more details.
        We use that information to workaround compatibility limits for legacy
        add-ons and to avoid them receiving negative boosts compared to
        WebExtensions.

        See https://github.com/mozilla/addons-server/issues/6424
        """
        return all(file_.is_mozilla_signed_extension
                   for file_ in self.all_files)

    @property
    def has_files(self):
        """True when this version has at least one attached file."""
        return bool(self.all_files)

    @property
    def is_unreviewed(self):
        """True when at least one file is in an unreviewed status."""
        return bool(
            list(
                filter(lambda f: f.status in amo.UNREVIEWED_FILE_STATUSES,
                       self.all_files)))

    @property
    def is_all_unreviewed(self):
        """True when every file is in an unreviewed status (or no files)."""
        return not bool([
            f for f in self.all_files
            if f.status not in amo.UNREVIEWED_FILE_STATUSES
        ])

    @property
    def sources_provided(self):
        """True when a source archive has been uploaded for this version."""
        return bool(self.source)

    @classmethod
    def _compat_map(cls, avs):
        """Build {APP: ApplicationsVersions} from an iterable of avs,
        silently skipping rows for unknown application ids."""
        apps = {}
        for av in avs:
            app_id = av.application
            if app_id in amo.APP_IDS:
                apps[amo.APP_IDS[app_id]] = av
        return apps

    @classmethod
    def transformer(cls, versions):
        """Attach all the compatible apps and files to the versions."""
        if not versions:
            return

        ids = set(v.id for v in versions)
        avs = (ApplicationsVersions.objects.filter(
            version__in=ids).select_related('min', 'max'))
        files = File.objects.filter(version__in=ids)

        def rollup(xs):
            # Group the rows by version_id: {version_id: [rows]}.
            groups = sorted_groupby(xs, 'version_id')
            return dict((k, list(vs)) for k, vs in groups)

        av_dict, file_dict = rollup(avs), rollup(files)

        for version in versions:
            v_id = version.id
            version._compatible_apps = cls._compat_map(av_dict.get(v_id, []))
            version.all_files = file_dict.get(v_id, [])
            for f in version.all_files:
                # Point each file back at the version instance we already
                # have, avoiding extra queries later.
                f.version = version

    @classmethod
    def transformer_activity(cls, versions):
        """Attach all the activity to the versions."""
        from olympia.activity.models import VersionLog  # yucky

        ids = set(v.id for v in versions)
        if not versions:
            return

        al = (VersionLog.objects.filter(
            version__in=ids).order_by('created').select_related(
                'activity_log', 'version'))

        def rollup(xs):
            # Group the rows by version_id: {version_id: [rows]}.
            groups = sorted_groupby(xs, 'version_id')
            return {k: list(vs) for k, vs in groups}

        al_dict = rollup(al)

        for version in versions:
            v_id = version.id
            version.all_activity = al_dict.get(v_id, [])

    def disable_old_files(self):
        """
        Disable files from versions older than the current one and awaiting
        review. Used when uploading a new version.

        Does nothing if the current instance is unlisted.
        """
        if self.channel == amo.RELEASE_CHANNEL_LISTED:
            qs = File.objects.filter(
                version__addon=self.addon_id,
                version__lt=self.id,
                version__deleted=False,
                status__in=[amo.STATUS_AWAITING_REVIEW, amo.STATUS_PENDING])
            # Use File.update so signals are triggered.
            for f in qs:
                f.update(status=amo.STATUS_DISABLED)

    def reset_nomination_time(self, nomination=None):
        """Set the nomination timestamp (defaults to now). Only applied when
        there is no nomination yet or an explicit one is passed."""
        if not self.nomination or nomination:
            nomination = nomination or datetime.datetime.now()
            # We need signal=False not to call update_status (which calls us).
            self.update(nomination=nomination, _signal=False)

    def inherit_nomination(self, from_statuses=None):
        """Inherit the nomination time from the most recently nominated other
        listed version of the same add-on (optionally restricted to versions
        with files in `from_statuses`)."""
        last_ver = (Version.objects.filter(
            addon=self.addon, channel=amo.RELEASE_CHANNEL_LISTED).exclude(
                nomination=None).exclude(id=self.pk).order_by('-nomination'))
        if from_statuses:
            last_ver = last_ver.filter(files__status__in=from_statuses)
        if last_ver.exists():
            self.reset_nomination_time(nomination=last_ver[0].nomination)

    @property
    def unreviewed_files(self):
        """A File is unreviewed if its status is amo.STATUS_AWAITING_REVIEW."""
        return self.files.filter(status=amo.STATUS_AWAITING_REVIEW)

    @property
    def is_ready_for_auto_approval(self):
        """Return whether or not this version could be *considered* for
        auto-approval.

        Does not necessarily mean that it would be auto-approved, just that it
        passes the most basic criteria to be considered a candidate by the
        auto_approve command."""
        return Version.objects.auto_approvable().filter(id=self.id).exists()

    @property
    def was_auto_approved(self):
        """Return whether or not this version was auto-approved."""
        from olympia.reviewers.models import AutoApprovalSummary
        try:
            return self.is_public() and AutoApprovalSummary.objects.filter(
                version=self).get().verdict == amo.AUTO_APPROVED
        except AutoApprovalSummary.DoesNotExist:
            pass
        return False

    def get_background_images_encoded(self, header_only=False):
        """Return {image name: base64-encoded data} for this version's first
        file's background images; empty dict when there are no files."""
        if not self.has_files:
            return {}
        file_obj = self.all_files[0]
        return {
            name: force_text(b64encode(background))
            for name, background in utils.get_background_images(
                file_obj, theme_data=None, header_only=header_only).items()
        }
예제 #9
0
class Rating(ModelBase):
    """A user rating/review of an add-on, or a developer reply to one
    (when reply_to is set).

    Soft-deleted by default (see delete()); denormalized fields (is_latest,
    previous_count) are refreshed asynchronously via update_denorm.
    """
    RATING_CHOICES = (
        (None, _('None')),
        (0, '☆☆☆☆☆'),
        (1, '☆☆☆☆★'),
        (2, '☆☆☆★★'),
        (3, '☆☆★★★'),
        (4, '☆★★★★'),
        (5, '★★★★★'),
    )
    id = PositiveAutoField(primary_key=True)
    addon = models.ForeignKey('addons.Addon',
                              related_name='_ratings',
                              on_delete=models.CASCADE)
    version = models.ForeignKey('versions.Version',
                                related_name='ratings',
                                null=True,
                                on_delete=models.CASCADE)
    user = models.ForeignKey('users.UserProfile',
                             related_name='_ratings_all',
                             on_delete=models.CASCADE)
    reply_to = models.OneToOneField(
        'self',
        null=True,
        related_name='reply',
        db_column='reply_to',
        on_delete=models.CASCADE,
    )

    rating = models.PositiveSmallIntegerField(null=True,
                                              choices=RATING_CHOICES)
    body = models.TextField(db_column='text_body', null=True)
    ip_address = models.CharField(max_length=255, default='0.0.0.0')

    editorreview = models.BooleanField(default=False)
    flag = models.BooleanField(default=False)

    deleted = models.BooleanField(default=False)

    # Denormalized fields for easy lookup queries.
    is_latest = models.BooleanField(
        default=True,
        editable=False,
        help_text="Is this the user's latest rating for the add-on?",
    )
    previous_count = models.PositiveIntegerField(
        default=0,
        editable=False,
        help_text='How many previous ratings by the user for this add-on?',
    )

    unfiltered = RatingManager(include_deleted=True)
    objects = RatingManager()
    without_replies = WithoutRepliesRatingManager()

    class Meta:
        db_table = 'reviews'
        # This is very important: please read the lengthy comment in Addon.Meta
        # description
        base_manager_name = 'unfiltered'
        ordering = ('-created', )
        indexes = [
            models.Index(fields=('version', ), name='version_id'),
            models.Index(fields=('user', ), name='reviews_ibfk_2'),
            models.Index(fields=('addon', ), name='addon_id'),
            models.Index(
                fields=('reply_to', 'is_latest', 'addon', 'created'),
                name='latest_reviews',
            ),
        ]
        constraints = [
            models.UniqueConstraint(fields=('version', 'user', 'reply_to'),
                                    name='one_review_per_user'),
        ]

    def __str__(self):
        return truncate(str(self.body), 10)

    def __init__(self, *args, **kwargs):
        """Accept an extra `user_responsible` kwarg (not a model field) and
        stash it on the instance; see the user_responsible property."""
        user_responsible = kwargs.pop('user_responsible', None)
        super(Rating, self).__init__(*args, **kwargs)
        if user_responsible is not None:
            self.user_responsible = user_responsible

    @property
    def user_responsible(self):
        """Return user responsible for the current changes being made on this
        model. Only set by the views when they are about to save a Review
        instance, to track if the original author or an admin was responsible
        for the change.

        Having this as a @property with a setter makes update_or_create() work,
        otherwise it rejects the property, causing an error."""
        return self._user_responsible

    @user_responsible.setter
    def user_responsible(self, value):
        self._user_responsible = value

    def get_url_path(self):
        """Return the URL to this rating's detail page."""
        return jinja_helpers.url('addons.ratings.detail', self.addon.slug,
                                 self.id)

    def approve(self, user):
        """Approve this rating during moderation: log the approval, remove
        its flags, clear the editorreview marker and award moderation points
        to the reviewer."""
        from olympia.reviewers.models import ReviewerScore

        activity.log_create(
            amo.LOG.APPROVE_RATING,
            self.addon,
            self,
            user=user,
            details=dict(
                body=str(self.body),
                addon_id=self.addon.pk,
                addon_title=str(self.addon.name),
                is_flagged=self.ratingflag_set.exists(),
            ),
        )
        for flag in self.ratingflag_set.all():
            flag.delete()
        self.editorreview = False
        # We've already logged what we want to log, no need to pass
        # user_responsible=user.
        self.save()
        ReviewerScore.award_moderation_points(user, self.addon, self.pk)

    def delete(self, user_responsible=None, send_post_save_signal=True):
        """Soft-delete this rating.

        If someone other than the author deletes it, the deletion is treated
        as a moderation action: it is logged, flags are removed and the
        moderator is awarded points.
        """
        if user_responsible is None:
            user_responsible = self.user

        rating_was_moderated = False
        # Log deleting ratings to moderation log,
        # except if the author deletes it
        if user_responsible != self.user:
            # Remember moderation state
            rating_was_moderated = True
            from olympia.reviewers.models import ReviewerScore

            activity.log_create(
                amo.LOG.DELETE_RATING,
                self.addon,
                self,
                user=user_responsible,
                details={
                    'body': str(self.body),
                    'addon_id': self.addon.pk,
                    'addon_title': str(self.addon.name),
                    'is_flagged': self.ratingflag_set.exists(),
                },
            )
            for flag in self.ratingflag_set.all():
                flag.delete()

        log.info(
            'Rating deleted: %s deleted id:%s by %s ("%s")',
            user_responsible.name,
            self.pk,
            self.user.name,
            str(self.body),
        )
        self.update(deleted=True, _signal=send_post_save_signal)
        # Force refreshing of denormalized data (it wouldn't happen otherwise
        # because we're not dealing with a creation).
        self.update_denormalized_fields()

        if rating_was_moderated:
            ReviewerScore.award_moderation_points(user_responsible, self.addon,
                                                  self.pk)

    def undelete(self):
        """Reverse a soft-delete and refresh denormalized fields."""
        self.update(deleted=False)
        # Force refreshing of denormalized data (it wouldn't happen otherwise
        # because we're not dealing with a creation).
        self.update_denormalized_fields()

    @classmethod
    def get_replies(cls, ratings):
        """Return {rating id: reply Rating} for the given ratings."""
        ratings = [r.id for r in ratings]
        qs = Rating.objects.filter(reply_to__in=ratings)
        return dict((r.reply_to_id, r) for r in qs)

    def send_notification_email(self):
        """Email the interested parties about this rating: the original
        reviewer for a developer reply, or the add-on authors for a new
        rating."""
        if self.reply_to:
            # It's a reply.
            reply_url = jinja_helpers.url(
                'addons.ratings.detail',
                self.addon.slug,
                self.reply_to.pk,
                add_prefix=False,
            )
            data = {
                'name': self.addon.name,
                'reply': self.body,
                'rating_url': jinja_helpers.absolutify(reply_url),
            }
            recipients = [self.reply_to.user.email]
            subject = 'Mozilla Add-on Developer Reply: %s' % self.addon.name
            template = 'ratings/emails/reply_review.ltxt'
            perm_setting = 'reply'
        else:
            # It's a new rating.
            rating_url = jinja_helpers.url('addons.ratings.detail',
                                           self.addon.slug,
                                           self.pk,
                                           add_prefix=False)
            data = {
                'name': self.addon.name,
                'rating': self,
                'rating_url': jinja_helpers.absolutify(rating_url),
            }
            recipients = [author.email for author in self.addon.authors.all()]
            subject = 'Mozilla Add-on User Rating: %s' % self.addon.name
            template = 'ratings/emails/new_rating.txt'
            perm_setting = 'new_review'
        send_mail_jinja(
            subject,
            template,
            data,
            recipient_list=recipients,
            perm_setting=perm_setting,
        )

    def update_denormalized_fields(self):
        """Queue the task that recomputes is_latest/previous_count for this
        (addon, user) pair."""
        from . import tasks

        pair = self.addon_id, self.user_id
        tasks.update_denorm(pair)

    # Fix: declared as @staticmethod for consistency with the other signal
    # handlers in this file (e.g. CollectionAddon.post_save/post_delete) —
    # it is a signal receiver whose first argument is `sender`, not `self`.
    @staticmethod
    def post_save(sender, instance, created, **kwargs):
        """post_save signal handler: log/record user-driven changes, send
        notification emails, refresh denormalized fields and rating
        aggregates, and reindex the add-on."""
        from olympia.addons.models import update_search_index
        from . import tasks

        if kwargs.get('raw'):
            return

        if getattr(instance, 'user_responsible', None):
            # user_responsible is not a field on the model, so it's not
            # persistent: it's just something the views will set temporarily
            # when manipulating a Rating that indicates a real user made that
            # change.
            action = 'New' if created else 'Edited'
            if instance.reply_to:
                log.info('%s reply to %s: %s' %
                         (action, instance.reply_to_id, instance.pk))
            else:
                log.info('%s rating: %s' % (action, instance.pk))

            # For new ratings - not replies - and all edits (including replies
            # this time) by users we want to insert a new ActivityLog.
            new_rating_or_edit = not instance.reply_to or not created
            if new_rating_or_edit:
                action = amo.LOG.ADD_RATING if created else amo.LOG.EDIT_RATING
                activity.log_create(action,
                                    instance.addon,
                                    instance,
                                    user=instance.user_responsible)

            # For new ratings and new replies we want to send an email.
            if created:
                instance.send_notification_email()

        if created:
            # Do this immediately synchronously so is_latest is correct before
            # we fire the aggregates task.
            instance.update_denormalized_fields()

        # Rating counts have changed, so run the task and trigger a reindex.
        tasks.addon_rating_aggregates.delay(instance.addon_id)
        update_search_index(instance.addon.__class__, instance.addon)
예제 #10
0
class Translation(ModelBase):
    """
    Translation model.

    Use :class:`translations.fields.TranslatedField` instead of a plain foreign
    key to this model.

    Rows that share the same ``id`` are the same string in different locales;
    ``autoid`` is the actual primary key.
    """

    autoid = PositiveAutoField(primary_key=True)
    # Shared across locales: all translations of one string have the same id.
    id = models.PositiveIntegerField()
    locale = models.CharField(max_length=10)
    localized_string = models.TextField(null=True)
    localized_string_clean = models.TextField(null=True)

    objects = TranslationManager()

    class Meta:
        db_table = 'translations'
        constraints = [
            models.UniqueConstraint(fields=('id', 'locale'), name='id'),
        ]

    def __str__(self):
        return str(self.localized_string) if self.localized_string else ''

    def __bool__(self):
        # __bool__ is called to evaluate an object in a boolean context.
        # We want Translations to be falsy if their string is empty (or
        # whitespace-only).
        return bool(self.localized_string) and bool(
            self.localized_string.strip())

    def __lt__(self, other):
        if hasattr(other, 'localized_string'):
            return self.localized_string < other.localized_string
        else:
            return self.localized_string < other

    def __eq__(self, other):
        # Django implements an __eq__ that only checks pks. We need to check
        # the strings if we're dealing with existing vs. unsaved Translations.
        if hasattr(other, 'localized_string'):
            return self.localized_string == other.localized_string
        else:
            return self.localized_string == other

    def __hash__(self):
        return hash(self.localized_string)

    def clean(self):
        # Normalize surrounding whitespace before saving/comparing.
        if self.localized_string:
            self.localized_string = self.localized_string.strip()

    def save(self, **kwargs):
        self.clean()
        return super().save(**kwargs)

    def delete(self, using=None):
        # FIXME: if the Translation is the one used as default/fallback,
        # then deleting it will mean the corresponding field on the related
        # model will stay empty even if there are translations in other
        # languages!
        cls = self.__class__
        using = using or router.db_for_write(cls, instance=self)
        # Look for all translations for the same string (id=self.id) except the
        # current one (autoid=self.autoid).
        qs = cls.objects.filter(id=self.id).exclude(autoid=self.autoid)
        if qs.using(using).exists():
            # If other Translations for the same id exist, we just need to
            # delete this one and *only* this one, without letting Django
            # collect dependencies (it'd remove the others, which we want to
            # keep).
            assert self._get_pk_val() is not None
            collector = Collector(using=using)
            collector.collect([self], collect_related=False)
            # In addition, because we have FK pointing to a non-unique column,
            # we need to force MySQL to ignore constraints because it's dumb
            # and would otherwise complain even if there are remaining rows
            # that matches the FK.
            with connections[using].constraint_checks_disabled():
                collector.delete()
        else:
            # If no other Translations with that id exist, then we should let
            # django behave normally. It should find the related model and set
            # the FKs to NULL.
            return super().delete(using=using)

    delete.alters_data = True

    @classmethod
    def new(cls, string, locale, id=None):
        """
        Jumps through all the right hoops to create a new translation.

        If ``id`` is not given a new id will be created using
        ``translations_seq``.  Otherwise, the id will be used to add strings to
        an existing translation.

        To increment IDs we use a setting on MySQL. This is to support multiple
        database masters -- it's just crazy enough to work! See bug 756242.
        """
        if id is None:
            # Get a sequence key for the new translation.
            with connections['default'].cursor() as cursor:
                cursor.execute("""
                    UPDATE `translations_seq`
                    SET `id`=LAST_INSERT_ID(
                        `id` + @@global.auto_increment_increment
                    )
                """)

                # The sequence table should never be empty. But alas, if it is,
                # let's fix it.
                if not cursor.rowcount > 0:
                    cursor.execute("""
                        INSERT INTO `translations_seq` (`id`)
                        VALUES(LAST_INSERT_ID(
                            `id` + @@global.auto_increment_increment
                        ))
                    """)
                cursor.execute('SELECT LAST_INSERT_ID()')
                id = cursor.fetchone()[0]

        # Update if one exists, otherwise create a new one.
        q = {'id': id, 'locale': locale}
        try:
            trans = cls.objects.get(**q)
            trans.localized_string = string
        except cls.DoesNotExist:
            trans = cls(localized_string=string, **q)

        return trans
예제 #11
0
class IPNetworkUserRestriction(RestrictionAbstractBaseModel):
    """Blocks submissions/approvals coming from restricted CIDR networks."""

    id = PositiveAutoField(primary_key=True)
    network = CIDRField(
        blank=True,
        null=True,
        help_text=_(
            'Enter a valid IPv4 or IPv6 CIDR network range, eg. 127.0.0.1/28'),
    )

    error_message = _('Multiple add-ons violating our policies have been'
                      ' submitted from your location. The IP address has been'
                      ' blocked.')

    class Meta:
        db_table = 'users_user_network_restriction'

    def __str__(self):
        return str(self.network)

    @classmethod
    def allow_submission(cls, request):
        """
        Return whether the specified request should be allowed to submit
        add-ons.
        """
        try:
            remote_addr = ipaddress.ip_address(request.META.get('REMOTE_ADDR'))
            # May be None for anonymous/unauthenticated users.
            user_last_login_ip = (ipaddress.ip_address(
                request.user.last_login_ip) if request.user else None)
        except ValueError:
            # If we don't have a valid ip address, let's deny
            return False

        return cls.allow_ips(
            remote_addr,
            user_last_login_ip,
            restriction_type=RESTRICTION_TYPES.SUBMISSION,
        )

    @classmethod
    def allow_auto_approval(cls, upload):
        """Return whether the FileUpload should be allowed auto-approval."""
        if not upload.user or not upload.ip_address:
            return False

        try:
            remote_addr = ipaddress.ip_address(upload.ip_address)
            user_last_login_ip = ipaddress.ip_address(
                upload.user.last_login_ip)
        except ValueError:
            # If we don't have a valid ip address, let's deny
            return False

        return cls.allow_ips(remote_addr,
                             user_last_login_ip,
                             restriction_type=RESTRICTION_TYPES.APPROVAL)

    @classmethod
    def allow_ips(cls, remote_addr, user_last_login_ip, *, restriction_type):
        """
        Return False when either IP falls inside a restricted network of the
        given restriction_type, True otherwise.

        ``user_last_login_ip`` may be None (see allow_submission).
        """
        restrictions = cls.objects.all().filter(
            restriction_type=restriction_type)
        for restriction in restrictions:
            network = restriction.network
            if network is None:
                # The field is nullable; a NULL network matches nothing.
                continue
            if (remote_addr in network
                    or (user_last_login_ip is not None
                        and user_last_login_ip in network)):
                # The following log statement is used by foxsec-pipeline.
                log.info(
                    'Restricting request from %s %s, %s %s (%s)',
                    'ip',
                    remote_addr,
                    'last_login_ip',
                    user_last_login_ip,
                    'network=%s' % restriction.network,
                )
                return False

        return True
예제 #12
0
class CollectionAddon(ModelBase):
    """Through-model linking an Addon to a Collection."""

    id = PositiveAutoField(primary_key=True)
    addon = models.ForeignKey(Addon, on_delete=models.CASCADE)
    collection = models.ForeignKey(Collection, on_delete=models.CASCADE)
    # category (deprecated: for "Fashion Your Firefox")
    comments = LinkifiedField(null=True)
    user = models.ForeignKey(UserProfile, null=True, on_delete=models.CASCADE)

    ordering = models.PositiveIntegerField(
        default=0,
        help_text='Add-ons are displayed in ascending order based on this field.',
    )

    class Meta(ModelBase.Meta):
        db_table = 'addons_collections'
        indexes = [
            models.Index(
                fields=('collection', 'created'), name='addons_collections_created_idx'
            ),
            models.Index(fields=('addon',), name='addons_collections_addon_idx'),
            models.Index(fields=('collection',), name='collection_id'),
            models.Index(fields=('user',), name='addons_collections_user_id'),
        ]
        constraints = [
            models.UniqueConstraint(fields=('addon', 'collection'), name='addon_id_2'),
        ]

    @staticmethod
    def _on_change(sender, instance, action, **kwargs):
        """Shared body of the post_save/post_delete signal handlers.

        Logs `action` for listed collections, refreshes
        Collection.addon_count via Collection.post_save, and reindexes the
        add-on when the collection is the featured themes one.
        """
        from olympia.addons.tasks import index_addons

        # 'raw' is set when loading fixtures; skip side effects then.
        if kwargs.get('raw'):
            return
        if instance.collection.listed:
            activity.log_create(action, instance.addon, instance.collection)
        kwargs['addons'] = [instance.addon]
        Collection.post_save(sender, instance.collection, **kwargs)
        if instance.collection.id == settings.COLLECTION_FEATURED_THEMES_ID:
            # That collection is special: each add-on in it is considered
            # recommended, so we need to reindex the corresponding add-on.
            # (Note: we are considering the add-on in a given CollectionAddon
            #  never changes, to change add-ons belonging to a collection we
            #  add or remove CollectionAddon instances, we never modify the
            #  addon foreignkey of an existing instance).
            index_addons.delay([instance.addon.id])

    @staticmethod
    def post_save(sender, instance, **kwargs):
        """Update Collection.addon_count and reindex add-on if the collection
        is featured."""
        CollectionAddon._on_change(
            sender, instance, amo.LOG.ADD_TO_COLLECTION, **kwargs)

    @staticmethod
    def post_delete(sender, instance, **kwargs):
        """Mirror of post_save for removals."""
        CollectionAddon._on_change(
            sender, instance, amo.LOG.REMOVE_FROM_COLLECTION, **kwargs)
예제 #13
0
class Version(OnChangeMixin, ModelBase):
    # Primary key.
    id = PositiveAutoField(primary_key=True)
    addon = models.ForeignKey('addons.Addon',
                              related_name='versions',
                              on_delete=models.CASCADE)
    # Optional; kept as NULL if the License row is deleted.
    license = models.ForeignKey('License',
                                null=True,
                                blank=True,
                                on_delete=models.SET_NULL)
    release_notes = PurifiedField(db_column='releasenotes', short=False)
    approval_notes = models.TextField(db_column='approvalnotes',
                                      default='',
                                      null=True,
                                      blank=True)
    # The developer-supplied version string, e.g. '1.2.3'.
    version = VersionStringField(max_length=255, default='0.1')

    # Review-queue timestamps.
    nomination = models.DateTimeField(null=True)
    reviewed = models.DateTimeField(null=True)

    # Soft-delete flag; see delete().
    deleted = models.BooleanField(default=False)

    # Optional source-code archive uploaded by the developer.
    source = models.FileField(upload_to=source_upload_path,
                              null=True,
                              blank=True,
                              max_length=255)

    # Listed/unlisted distribution channel.
    channel = models.IntegerField(choices=amo.RELEASE_CHANNEL_CHOICES,
                                  default=amo.RELEASE_CHANNEL_LISTED)

    # Commit hash from git extraction (blank until extracted); see
    # from_upload() and the 'enable-uploads-commit-to-git-storage' switch.
    git_hash = models.CharField(max_length=40, blank=True)

    needs_human_review = models.BooleanField(default=False)

    # The order of those managers is very important: please read the lengthy
    # comment above the Addon managers declaration/instantiation.
    unfiltered = VersionManager(include_deleted=True)
    objects = VersionManager()

    # See UnfilteredVersionManagerForRelations() docstring for usage of this
    # special manager.
    unfiltered_for_relations = UnfilteredVersionManagerForRelations()

    class Meta(ModelBase.Meta):
        db_table = 'versions'
        # This is very important: please read the lengthy comment in Addon.Meta
        # description
        base_manager_name = 'unfiltered'
        ordering = ['-created', '-modified']
        indexes = [
            models.Index(fields=('addon', ), name='addon_id'),
            models.Index(fields=('license', ), name='license_id'),
        ]
        constraints = [
            models.UniqueConstraint(
                fields=('addon', 'version'),
                name='versions_addon_id_version_5a2e75b6_uniq',
            ),
        ]

    def __str__(self):
        """Return the version string, HTML-escaped for safe display."""
        escaped = markupsafe.escape(self.version)
        return escaped

    @classmethod
    def from_upload(cls,
                    upload,
                    addon,
                    selected_apps,
                    channel,
                    parsed_data=None):
        """
        Create a Version instance and corresponding File(s) from a
        FileUpload, an Addon, a list of compatible app ids, a channel id and
        the parsed_data generated by parse_addon().

        Note that it's the caller's responsibility to ensure the file is
        valid. We can't check for that here because an admin may have
        overridden the validation results.

        Raises VersionCreateError if the add-on is disabled or the upload is
        missing required data.
        """
        from olympia.addons.models import AddonReviewerFlags
        from olympia.addons.utils import RestrictionChecker
        from olympia.git.utils import create_git_extraction_entry

        assert parsed_data is not None

        # Guard clauses: refuse to create a version from bad input.
        if addon.status == amo.STATUS_DISABLED:
            raise VersionCreateError(
                'Addon is Mozilla Disabled; no new versions are allowed.')

        if upload.addon and upload.addon != addon:
            raise VersionCreateError(
                'FileUpload was made for a different Addon')

        if not upload.user or not upload.ip_address or not upload.source:
            raise VersionCreateError(
                'FileUpload does not have some required fields')

        if not upload.user.last_login_ip or not upload.user.email:
            raise VersionCreateError(
                'FileUpload user does not have some required fields')

        # Listed versions inherit the license from the previous listed
        # version, if there is one.
        license_id = None
        if channel == amo.RELEASE_CHANNEL_LISTED:
            previous_version = addon.find_latest_version(channel=channel,
                                                         exclude=())
            if previous_version and previous_version.license_id:
                license_id = previous_version.license_id
        approval_notes = None
        if parsed_data.get('is_mozilla_signed_extension'):
            approval_notes = (
                'This version has been signed with Mozilla internal certificate.'
            )
        version = cls.objects.create(
            addon=addon,
            approval_notes=approval_notes,
            version=parsed_data['version'],
            license_id=license_id,
            channel=channel,
        )
        email = upload.user.email if upload.user and upload.user.email else ''
        with core.override_remote_addr(upload.ip_address):
            # The following log statement is used by foxsec-pipeline.
            # We override the IP because it might be called from a task and we
            # want the original IP from the submitter.
            log.info(
                f'New version: {version!r} ({version.id}) from {upload!r}',
                extra={
                    'email': email,
                    'guid': addon.guid,
                    'upload': upload.uuid.hex,
                    'user_id': upload.user_id,
                    'from_api': upload.source == amo.UPLOAD_SOURCE_API,
                },
            )
            activity.log_create(amo.LOG.ADD_VERSION,
                                version,
                                addon,
                                user=upload.user or get_task_user())

        if addon.type == amo.ADDON_STATICTHEME:
            # We don't let developers select apps for static themes
            selected_apps = [app.id for app in amo.APP_USAGE]

        # Create the ApplicationsVersions rows for the selected apps.
        compatible_apps = {}
        for app in parsed_data.get('apps', []):
            if app.id not in selected_apps:
                # If the user chose to explicitly deselect Firefox for Android
                # we're not creating the respective `ApplicationsVersions`
                # which will have this add-on then be listed only for
                # Firefox specifically.
                continue

            compatible_apps[app.appdata] = ApplicationsVersions(
                version=version, min=app.min, max=app.max, application=app.id)
            compatible_apps[app.appdata].save()

        # Pre-generate _compatible_apps property to avoid accidentally
        # triggering queries with that instance later.
        version._compatible_apps = compatible_apps

        # Create relevant file and update the all_files cached property on the
        # Version, because we might need it afterwards.
        version.all_files = [
            File.from_upload(
                upload=upload,
                version=version,
                parsed_data=parsed_data,
            )
        ]

        version.inherit_nomination(from_statuses=[amo.STATUS_AWAITING_REVIEW])
        version.disable_old_files()

        # After the upload has been copied to its permanent location, delete it
        # from storage. Keep the FileUpload instance (it gets cleaned up by a
        # cron eventually some time after its creation, in amo.cron.gc()),
        # making sure it's associated with the add-on instance.
        storage.delete(upload.path)
        upload.path = ''
        if upload.addon is None:
            upload.addon = addon
        upload.save()

        version_uploaded.send(instance=version, sender=Version)

        # Re-attach scanner results from the upload to the new version.
        if version.is_webextension:
            if (waffle.switch_is_active('enable-yara')
                    or waffle.switch_is_active('enable-customs')
                    or waffle.switch_is_active('enable-wat')):
                ScannerResult.objects.filter(upload_id=upload.id).update(
                    version=version)

        if waffle.switch_is_active('enable-uploads-commit-to-git-storage'):
            # Schedule this version for git extraction.
            transaction.on_commit(
                lambda: create_git_extraction_entry(version=version))

        # Generate a preview and icon for listed static themes
        if (addon.type == amo.ADDON_STATICTHEME
                and channel == amo.RELEASE_CHANNEL_LISTED):
            theme_data = parsed_data.get('theme', {})
            generate_static_theme_preview(theme_data, version.pk)

        # Reset add-on reviewer flags to disable auto-approval and require
        # admin code review if the package has already been signed by mozilla.
        reviewer_flags_defaults = {}
        is_mozilla_signed = parsed_data.get('is_mozilla_signed_extension')
        if upload.validation_timeout:
            reviewer_flags_defaults['needs_admin_code_review'] = True
        if is_mozilla_signed and addon.type != amo.ADDON_LPAPP:
            reviewer_flags_defaults['needs_admin_code_review'] = True
            reviewer_flags_defaults['auto_approval_disabled'] = True

        # Check if the approval should be restricted
        if not RestrictionChecker(upload=upload).is_auto_approval_allowed():
            flag = ('auto_approval_disabled'
                    if channel == amo.RELEASE_CHANNEL_LISTED else
                    'auto_approval_disabled_unlisted')
            reviewer_flags_defaults[flag] = True

        if reviewer_flags_defaults:
            AddonReviewerFlags.objects.update_or_create(
                addon=addon, defaults=reviewer_flags_defaults)

        # Authors need to be notified about auto-approval delay again since
        # they are submitting a new version.
        addon.reset_notified_about_auto_approval_delay()

        # Track the time it took from first upload through validation
        # (and whatever else) until a version was created.
        upload_start = utc_millesecs_from_epoch(upload.created)
        now = datetime.datetime.now()
        now_ts = utc_millesecs_from_epoch(now)
        upload_time = now_ts - upload_start

        log.info('Time for version {version} creation from upload: {delta}; '
                 'created={created}; now={now}'.format(delta=upload_time,
                                                       version=version,
                                                       created=upload.created,
                                                       now=now))
        statsd.timing('devhub.version_created_from_upload', upload_time)

        return version

    def license_url(self, impala=False):
        """Return the URL of this version's license page.

        ``impala`` is accepted for backward compatibility but unused here.
        """
        return reverse('addons.license', args=[self.addon.slug, self.version])

    def get_url_path(self):
        """Return the public listing URL for this version, '' if unlisted."""
        if self.channel != amo.RELEASE_CHANNEL_UNLISTED:
            return reverse('addons.versions', args=[self.addon.slug])
        return ''

    def delete(self, hard=False):
        """Delete this version.

        By default this is a soft-delete: files are disabled and the
        ``deleted`` flag is set, keeping the row for comparisons and as a
        record of the version number. With ``hard=True`` the row is removed
        from the database entirely.
        """
        # To avoid a circular import
        from .tasks import delete_preview_files

        log.info(f'Version deleted: {self!r} ({self.id})')
        activity.log_create(amo.LOG.DELETE_VERSION, self.addon,
                            str(self.version))

        if hard:
            super().delete()
        else:
            # By default we soft delete so we can keep the files for comparison
            # and a record of the version number.
            self.files.update(status=amo.STATUS_DISABLED)
            self.deleted = True
            self.save()

            # Clear pending rejection flag (we have the activity log for
            # records purposes, the flag serves no purpose anymore if the
            # version is deleted).
            VersionReviewerFlags.objects.filter(version=self).update(
                pending_rejection=None)

            previews_pks = list(
                VersionPreview.objects.filter(version__id=self.id).values_list(
                    'id', flat=True))

            # Preview files are cleaned up asynchronously.
            for preview_pk in previews_pks:
                delete_preview_files.delay(preview_pk)

    @property
    def is_user_disabled(self):
        """Whether any file was disabled by its developer (not Mozilla)."""
        disabled = self.files.filter(status=amo.STATUS_DISABLED)
        return disabled.exclude(original_status=amo.STATUS_NULL).exists()

    @is_user_disabled.setter
    def is_user_disabled(self, disable):
        """Disable or re-enable this version's files on the developer's
        behalf, logging the corresponding activity."""
        if disable:
            # User wants to disable (and the File isn't already). Remember
            # each file's previous status so it can be restored later.
            activity.log_create(amo.LOG.DISABLE_VERSION, self.addon, self)
            to_disable = self.files.exclude(status=amo.STATUS_DISABLED).all()
            for file_ in to_disable:
                file_.update(original_status=file_.status,
                             status=amo.STATUS_DISABLED)
        else:
            # User wants to re-enable (and user did the disable, not
            # Mozilla): restore the recorded original status.
            activity.log_create(amo.LOG.ENABLE_VERSION, self.addon, self)
            to_enable = self.files.exclude(
                original_status=amo.STATUS_NULL).all()
            for file_ in to_enable:
                file_.update(status=file_.original_status,
                             original_status=amo.STATUS_NULL)

    @cached_property
    def all_activity(self):
        """All VersionLog entries for this version, oldest first. Cached."""
        # prefetch_related() and not select_related() the ActivityLog to make
        # sure its transformer is called.
        logs = self.versionlog_set.prefetch_related('activity_log')
        return logs.order_by('created')

    @property
    def compatible_apps(self):
        """Mapping of {APP: ApplicationsVersions} for this version."""
        addon = self.addon
        # Dicts and search providers don't have compatibility info; fake a
        # {app: None} entry for each app type they support.
        if addon and addon.type in amo.NO_COMPAT:
            return dict.fromkeys(amo.APP_TYPE_SUPPORT[addon.type])
        # Otherwise, return _compatible_apps which is a cached property that
        # is filled by the transformer, or simply calculated from the related
        # compat instances.
        return self._compatible_apps

    @cached_property
    def _compatible_apps(self):
        """Get a mapping of {APP: ApplicationsVersions}. Cached."""
        related = self.apps.all().select_related('min', 'max')
        return self._compat_map(related)

    @cached_property
    def compatible_apps_ordered(self):
        """compatible_apps items, sorted by app short name. Cached."""
        return sorted(self.compatible_apps.items(),
                      key=lambda item: item[0].short)

    @cached_property
    def is_compatible_by_default(self):
        """Returns whether or not the add-on is considered compatible by
        default: no file declares binary components or strict
        compatibility."""
        # Use self.all_files directly since that's cached and more potentially
        # prefetched through a transformer already. A generator (not a list)
        # lets any() short-circuit on the first incompatible file.
        return not any(
            file.binary_components or file.strict_compatibility
            for file in self.all_files
        )

    def is_compatible_app(self, app):
        """Returns True if the provided app passes compatibility conditions."""
        if self.addon.type in amo.NO_COMPAT:
            return True
        appversion = self.compatible_apps.get(app)
        if not appversion or app.id not in amo.D2C_MIN_VERSIONS:
            return False
        minimum = amo.D2C_MIN_VERSIONS.get(app.id, '*')
        return version_int(appversion.max.version) >= version_int(minimum)

    def compat_override_app_versions(self):
        """Returns the incompatible app versions range(s).

        If not ranges, returns empty list.  Otherwise, this will return all
        the app version ranges that this particular version is incompatible
        with.
        """
        overrides = list(self.addon.compatoverride_set.all())
        if not overrides:
            return []

        own_version = version_int(self.version)
        app_versions = []
        for override in overrides:
            for compat_range in override.collapsed_ranges():
                lower = version_int(compat_range.min)
                upper = version_int(compat_range.max)
                if lower <= own_version <= upper:
                    app_versions.extend(
                        (app.min, app.max) for app in compat_range.apps)
        return app_versions

    @cached_property
    def all_files(self):
        """Shortcut for list(self.files.all()). Cached."""
        files_qs = self.files.all()
        return list(files_qs)

    @property
    def current_file(self):
        """Shortcut for selecting the first file from self.all_files."""
        files = self.all_files
        return files[0]

    @property
    def status(self):
        """Human-readable status label for each file of this version."""
        labels = []
        for file_ in self.all_files:
            fallback = gettext('[status:%s]') % file_.status
            labels.append(file_.STATUS_CHOICES.get(file_.status, fallback))
        return labels

    @property
    def statuses(self):
        """Unadulterated statuses, good for an API."""
        pairs = []
        for file_ in self.all_files:
            pairs.append((file_.id, file_.status))
        return pairs

    def is_public(self):
        """Whether this version is publicly visible.

        To be public, a version must not be deleted, must belong to a public
        addon, and all its attached files must have public status.
        """
        try:
            if self.deleted or not self.addon.is_public():
                return False
            return all(f.status == amo.STATUS_APPROVED
                       for f in self.all_files)
        except ObjectDoesNotExist:
            return False

    @property
    def is_webextension(self):
        """True if at least one attached file is a WebExtension."""
        for file_ in self.all_files:
            if file_.is_webextension:
                return True
        return False

    @property
    def is_mozilla_signed(self):
        """Is the file a special "Mozilla Signed Extension"

        See https://wiki.mozilla.org/Add-ons/InternalSigning for more details.
        We use that information to workaround compatibility limits for legacy
        add-ons and to avoid them receiving negative boosts compared to
        WebExtensions.

        See https://github.com/mozilla/addons-server/issues/6424
        """
        # Every attached file must carry the internal signature (vacuously
        # true when there are no files, matching all()).
        for file_ in self.all_files:
            if not file_.is_mozilla_signed_extension:
                return False
        return True

    @property
    def has_files(self):
        """Whether this version has at least one file attached."""
        return len(self.all_files) > 0

    @property
    def is_unreviewed(self):
        """True if at least one file is in an unreviewed status."""
        # any() with a generator short-circuits, instead of materializing a
        # filter() into a list just to test emptiness.
        return any(f.status in amo.UNREVIEWED_FILE_STATUSES
                   for f in self.all_files)

    @property
    def is_all_unreviewed(self):
        """True if no file is in a reviewed status (vacuously true when
        there are no files, as before)."""
        # all() with a generator replaces the double-negative
        # `not bool([listcomp])` and short-circuits on the first reviewed
        # file.
        return all(f.status in amo.UNREVIEWED_FILE_STATUSES
                   for f in self.all_files)

    @property
    def sources_provided(self):
        """Whether the developer attached a source-code archive."""
        return bool(self.source)

    def _compat_map(self, avs):
        """Build {APP: ApplicationsVersions} from an iterable of AV rows,
        linking each row back to this version and dropping rows for unknown
        applications."""
        result = {}
        for av in avs:
            av.version = self
            app = amo.APP_IDS.get(av.application)
            if app is not None:
                result[app] = av
        return result

    @classmethod
    def transformer(cls, versions):
        """Attach all the compatible apps and files to the versions."""
        if not versions:
            return

        ids = {version.id for version in versions}
        avs = ApplicationsVersions.objects.filter(
            version__in=ids).select_related('min', 'max')
        files = File.objects.filter(version__in=ids)

        def by_version_id(rows):
            # Group related rows under their version_id.
            return {
                version_id: list(group)
                for version_id, group in sorted_groupby(rows, 'version_id')
            }

        av_dict = by_version_id(avs)
        file_dict = by_version_id(files)

        for version in versions:
            version._compatible_apps = version._compat_map(
                av_dict.get(version.id, []))
            version.all_files = file_dict.get(version.id, [])
            for file_ in version.all_files:
                file_.version = version

    @classmethod
    def transformer_promoted(cls, versions):
        """Attach the promoted approvals to the versions.

        Sets `approved_for_groups` on each version: a list of
        (promoted group, app) tuples.
        """
        if not versions:
            return

        # Grab the model class through the relation to avoid an import cycle.
        PromotedApproval = versions[0].promoted_approvals.model

        ids = {v.id for v in versions}

        approvals = list(
            PromotedApproval.objects.filter(version_id__in=ids).values_list(
                'version_id', 'group_id', 'application_id', named=True))

        # Group the approval rows by version id.
        approval_dict = {
            version_id: list(groups)
            for version_id, groups in sorted_groupby(approvals, 'version_id')
        }
        for version in versions:
            v_id = version.id
            # Resolve ids to group/app objects, dropping unknown groups.
            groups = [(
                PROMOTED_GROUPS_BY_ID.get(approval.group_id),
                APP_IDS.get(approval.application_id),
            ) for approval in approval_dict.get(v_id, [])
                      if approval.group_id in PROMOTED_GROUPS_BY_ID]
            version.approved_for_groups = groups

    @classmethod
    def transformer_activity(cls, versions):
        """Attach all the activity to the versions."""
        from olympia.activity.models import VersionLog

        if not versions:
            return
        ids = {version.id for version in versions}

        # Ideally, we'd start from the ActivityLog, but because VersionLog
        # to ActivityLog isn't a OneToOneField, we wouldn't be able to find
        # the version easily afterwards - we can't even do a
        # select_related('versionlog') and try to traverse the relation to
        # find the version. So, instead, start from VersionLog, but make sure
        # to use prefetch_related() (and not select_related() - yes, it's one
        # extra query, but it's worth it to benefit from the default
        # transformer) so that the ActivityLog default transformer is called.
        logs = (VersionLog.objects.prefetch_related('activity_log')
                .filter(version__in=ids).order_by('created'))

        grouped = {
            version_id: list(group)
            for version_id, group in sorted_groupby(logs, 'version_id')
        }

        for version in versions:
            version.all_activity = grouped.get(version.id, [])

    @classmethod
    def transformer_license(cls, versions):
        """Attach all the licenses to the versions.

        Do not use if you need the license text: it's explicitly deferred in
        this transformer, because it should only be used when listing multiple
        versions, where returning license text is not supposed to be needed.

        The translations app doesn't fully handle evaluating a deferred field,
        so the callers need to make sure the license text will never be needed
        on instances returned by a queryset transformed by this method."""
        if not versions:
            return
        wanted_ids = {version.license_id for version in versions}
        # Fetch every relevant license in one query, without its text.
        by_id = {
            found.id: found
            for found in License.objects.filter(
                id__in=wanted_ids).defer('text')
        }

        for version in versions:
            found = by_id.get(version.license_id)
            if found:
                version.license = found

    @classmethod
    def transformer_auto_approvable(cls, versions):
        """Attach auto-approvability information to the versions.

        Sets `is_ready_for_auto_approval` (bool) on every version.
        """
        ids = {v.id for v in versions}
        if not ids:
            return

        # Materialize the queryset into a set once: testing `pk in queryset`
        # on an unevaluated values_list would otherwise issue one database
        # query per version in the loop below.
        auto_approvable = set(
            Version.objects.auto_approvable().filter(
                pk__in=ids).values_list('pk', flat=True))

        for version in versions:
            version.is_ready_for_auto_approval = version.pk in auto_approvable

    def disable_old_files(self):
        """
        Disable files from versions older than the current one in the same
        channel and awaiting review. Used when uploading a new version.

        Does nothing if the current instance is unlisted.
        """
        if self.channel != amo.RELEASE_CHANNEL_LISTED:
            return
        pending = File.objects.filter(
            version__addon=self.addon_id,
            version__lt=self.id,
            version__deleted=False,
            version__channel=self.channel,
            status=amo.STATUS_AWAITING_REVIEW,
        )
        # Update each File individually (not queryset.update()) so that the
        # model signals are triggered.
        for pending_file in pending:
            pending_file.update(status=amo.STATUS_DISABLED)

    def reset_nomination_time(self, nomination=None):
        """Set the nomination date, defaulting to now.

        Only writes to the db when an explicit `nomination` is given or when
        no nomination date exists yet."""
        if nomination or not self.nomination:
            when = nomination or datetime.datetime.now()
            # We need signal=False not to call update_status (which calls us).
            self.update(nomination=when, _signal=False)

    def inherit_nomination(self, from_statuses=None):
        """Copy the nomination date from the most recently nominated other
        listed version of this add-on, if any exists.

        `from_statuses` optionally restricts candidates to versions having a
        file in one of those statuses."""
        candidates = Version.objects.filter(
            addon=self.addon,
            channel=amo.RELEASE_CHANNEL_LISTED,
        ).exclude(nomination=None).exclude(id=self.pk).order_by('-nomination')
        if from_statuses:
            candidates = candidates.filter(files__status__in=from_statuses)
        if candidates.exists():
            self.reset_nomination_time(nomination=candidates[0].nomination)

    @property
    def unreviewed_files(self):
        """Queryset of this version's files still awaiting review
        (i.e. status is amo.STATUS_AWAITING_REVIEW)."""
        pending_status = amo.STATUS_AWAITING_REVIEW
        return self.files.filter(status=pending_status)

    @cached_property
    def is_ready_for_auto_approval(self):
        """Return whether or not this version could be *considered* for
        auto-approval.

        Does not necessarily mean that it would be auto-approved, just that it
        passes the most basic criteria to be considered a candidate by the
        auto_approve command."""
        candidates = Version.objects.auto_approvable()
        return candidates.filter(id=self.id).exists()

    @property
    def was_auto_approved(self):
        """Return whether or not this version was auto-approved."""
        from olympia.reviewers.models import AutoApprovalSummary

        if not self.is_public():
            return False
        try:
            summary = AutoApprovalSummary.objects.filter(version=self).get()
        except AutoApprovalSummary.DoesNotExist:
            return False
        return summary.verdict == amo.AUTO_APPROVED

    def get_background_images_encoded(self, header_only=False):
        """Return a dict mapping background image name to its base64-encoded
        content; empty dict when the version has no files."""
        if not self.has_files:
            return {}
        images = utils.get_background_images(
            self.all_files[0], theme_data=None, header_only=header_only)
        return {
            name: force_str(b64encode(content))
            for name, content in images.items()
        }

    def can_be_disabled_and_deleted(self):
        # see https://github.com/mozilla/addons-server/issues/15121#issuecomment-667226959  # noqa
        # "It should apply to the <groups> that require a review to be badged"
        from olympia.promoted.models import PromotedApproval

        # Non-current versions can always be disabled and deleted.
        if self != self.addon.current_version:
            return True
        # Same for add-ons not in a badged, pre-reviewed promoted group.
        promoted_group = self.addon.promoted_group()
        if not promoted_group or not (
                promoted_group.badged and promoted_group.pre_review):
            return True

        # Otherwise, only allow it when a previous valid version in the same
        # channel already carries the promotion approval.
        previous_candidates = (self.addon.versions.valid().filter(
            channel=self.channel).exclude(id=self.id).no_transforms()[:1])
        return PromotedApproval.objects.filter(
            group_id=promoted_group.id,
            version__in=previous_candidates).exists()
# --- Example #14 (scraper artifact marker) ---
class File(OnChangeMixin, ModelBase):
    """An individual uploaded file (typically an XPI) belonging to a Version.

    Tracks review status, content hashes, signing-related flags and the
    on-disk location of the file; disabled files are kept under a separate
    "guarded" path (see current_file_path).
    """
    id = PositiveAutoField(primary_key=True)
    STATUS_CHOICES = amo.STATUS_CHOICES_FILE

    version = models.ForeignKey('versions.Version',
                                related_name='files',
                                on_delete=models.CASCADE)
    platform = models.PositiveIntegerField(
        choices=amo.SUPPORTED_PLATFORMS_CHOICES,
        default=amo.PLATFORM_ALL.id,
        db_column="platform_id")
    filename = models.CharField(max_length=255, default='')
    size = models.PositiveIntegerField(default=0)  # In bytes.
    # Hash of the file as currently stored (updated after signing/repackaging).
    hash = models.CharField(max_length=255, default='')
    # The original hash of the file, before we sign it, or repackage it in
    # any other way.
    original_hash = models.CharField(max_length=255, default='')
    status = models.PositiveSmallIntegerField(
        choices=STATUS_CHOICES.items(), default=amo.STATUS_AWAITING_REVIEW)
    datestatuschanged = models.DateTimeField(null=True, auto_now_add=True)
    is_restart_required = models.BooleanField(default=False)
    strict_compatibility = models.BooleanField(default=False)
    reviewed = models.DateTimeField(null=True, blank=True)
    # The `binary` field is used to store the flags from amo-validator when it
    # finds files with binary extensions or files that may contain binary
    # content.
    binary = models.BooleanField(default=False)
    # The `binary_components` field is used to store the flag from
    # amo-validator when it finds "binary-components" in the chrome manifest
    # file, used for default to compatible.
    binary_components = models.BooleanField(default=False, db_index=True)
    # Serial number of the certificate use for the signature.
    cert_serial_num = models.TextField(blank=True)
    # Is the file signed by Mozilla?
    is_signed = models.BooleanField(default=False)
    # Is the file an experiment (see bug 1220097)?
    is_experiment = models.BooleanField(default=False)
    # Is the file a WebExtension?
    is_webextension = models.BooleanField(default=False)
    # Is the file a special "Mozilla Signed Extension"
    # see https://wiki.mozilla.org/Add-ons/InternalSigning
    is_mozilla_signed_extension = models.BooleanField(default=False)
    # The user has disabled this file and this was its status.
    # STATUS_NULL means the user didn't disable the File - i.e. Mozilla did.
    original_status = models.PositiveSmallIntegerField(default=amo.STATUS_NULL)

    class Meta(ModelBase.Meta):
        db_table = 'files'

    def __str__(self):
        """Display the primary key as the string representation."""
        return six.text_type(self.id)

    def get_platform_display(self):
        """Return the human-readable name of this file's platform."""
        return force_text(amo.PLATFORMS[self.platform].name)

    @property
    def has_been_validated(self):
        """Whether a FileValidation record exists for this file."""
        try:
            self.validation
        except FileValidation.DoesNotExist:
            return False
        else:
            return True

    @property
    def automated_signing(self):
        """True if this file is eligible for automated signing. This currently
        means that either its version is unlisted."""
        return self.version.channel == amo.RELEASE_CHANNEL_UNLISTED

    def get_file_cdn_url(self, attachment=False):
        """Return the URL for the file corresponding to this instance
        on the CDN.

        With `attachment=True`, point at the "_attachments" path instead.
        """
        if attachment:
            host = posixpath.join(user_media_url('addons'), '_attachments')
        else:
            host = user_media_url('addons')

        return posixpath.join(
            *map(force_bytes, [host, self.version.addon.id, self.filename]))

    def get_url_path(self, src, attachment=False):
        """Return the absolute download URL for this file, tagged with
        `src` (download source tracking)."""
        return self._make_download_url('downloads.file',
                                       src,
                                       attachment=attachment)

    def _make_download_url(self, view_name, src, attachment=False):
        """Build an absolute download URL for `view_name`, appending the
        filename and a `src` query parameter."""
        kwargs = {'file_id': self.pk}
        if attachment:
            kwargs['type'] = 'attachment'
        url = os.path.join(reverse(view_name, kwargs=kwargs), self.filename)
        return absolutify(urlparams(url, src=src))

    @classmethod
    def from_upload(cls, upload, version, platform, parsed_data=None):
        """
        Create a File instance from a FileUpload, a Version, a platform id
        and the parsed_data generated by parse_addon().

        Note that it's the caller's responsability to ensure the file is valid.
        We can't check for that here because an admin may have overridden the
        validation results."""
        assert parsed_data is not None

        file_ = cls(version=version, platform=platform)
        upload_path = force_text(nfd_str(upload.path))
        ext = force_text(os.path.splitext(upload_path)[1])
        # Normalize legacy .jar uploads to the .xpi extension.
        if ext == '.jar':
            ext = '.xpi'
        file_.filename = file_.generate_filename(extension=ext or '.xpi')
        # Size in bytes.
        file_.size = storage.size(upload_path)
        # Copy the validator-provided flags from the parsed manifest data.
        file_.is_restart_required = parsed_data.get('is_restart_required',
                                                    False)
        file_.strict_compatibility = parsed_data.get('strict_compatibility',
                                                     False)
        file_.is_experiment = parsed_data.get('is_experiment', False)
        file_.is_webextension = parsed_data.get('is_webextension', False)
        file_.is_mozilla_signed_extension = parsed_data.get(
            'is_mozilla_signed_extension', False)

        # The file hasn't been signed/repackaged yet, so the original hash is
        # the current hash.
        file_.hash = file_.generate_hash(upload_path)
        file_.original_hash = file_.hash
        file_.save()

        if file_.is_webextension:
            permissions = list(parsed_data.get('permissions', []))
            # Add content_scripts host matches too.
            for script in parsed_data.get('content_scripts', []):
                permissions.extend(script.get('matches', []))
            if permissions:
                WebextPermission.objects.create(permissions=permissions,
                                                file=file_)

        log.debug('New file: %r from %r' % (file_, upload))

        # Move the uploaded file from the temp location.
        copy_stored_file(upload_path, file_.current_file_path)

        if upload.validation:
            validation = json.loads(upload.validation)
            FileValidation.from_json(file_, validation)

        return file_

    def generate_hash(self, filename=None):
        """Generate a hash for a file."""
        with open(filename or self.current_file_path, 'rb') as fobj:
            return 'sha256:{}'.format(get_sha256(fobj))

    def generate_filename(self, extension=None):
        """
        Files are in the format of:
        {addon_name}-{version}-{apps}-{platform}

        Also sets self.filename as a side effect and returns it.
        """
        parts = []
        addon = self.version.addon
        # slugify drops unicode so we may end up with an empty string.
        # Apache did not like serving unicode filenames (bug 626587).
        extension = extension or '.xpi'
        name = slugify(addon.name).replace('-', '_') or 'addon'
        parts.append(name)
        parts.append(self.version.version)

        if addon.type not in amo.NO_COMPAT and self.version.compatible_apps:
            apps = '+'.join(
                sorted([a.shortername for a in self.version.compatible_apps]))
            parts.append(apps)

        # PLATFORM_ALL is the default, so it's omitted from the filename.
        if self.platform and self.platform != amo.PLATFORM_ALL.id:
            parts.append(amo.PLATFORMS[self.platform].shortname)

        self.filename = '-'.join(parts) + extension
        return self.filename

    # Matches a lowercase slug followed by the rest of the filename.
    _pretty_filename = re.compile(r'(?P<slug>[a-z0-7_]+)(?P<suffix>.*)')

    def pretty_filename(self, maxlen=20):
        """Displayable filename.

        Truncates filename so that the slug part fits maxlen.
        """
        m = self._pretty_filename.match(self.filename)
        if not m:
            return self.filename
        if len(m.group('slug')) < maxlen:
            return self.filename
        return u'%s...%s' % (m.group('slug')[0:(maxlen - 3)],
                             m.group('suffix'))

    def latest_xpi_url(self, attachment=False):
        """Return the "latest" download URL for this file's add-on, using a
        stable addon-{pk}-latest{ext} filename."""
        addon = self.version.addon
        kw = {'addon_id': addon.slug}
        if self.platform != amo.PLATFORM_ALL.id:
            kw['platform'] = self.platform
        if attachment:
            kw['type'] = 'attachment'
        return os.path.join(reverse('downloads.latest', kwargs=kw),
                            'addon-%s-latest%s' % (addon.pk, self.extension))

    @property
    def file_path(self):
        """Path of the file under the regular (public) add-ons media dir."""
        return os.path.join(user_media_path('addons'),
                            str(self.version.addon_id), self.filename)

    @property
    def addon(self):
        """Shortcut to the add-on this file belongs to (via its version)."""
        return self.version.addon

    @property
    def guarded_file_path(self):
        """Path of the file under the guarded (disabled) add-ons media dir."""
        return os.path.join(user_media_path('guarded_addons'),
                            str(self.version.addon_id), self.filename)

    @property
    def current_file_path(self):
        """Returns the current path of the file, whether or not it is
        guarded."""

        file_disabled = self.status == amo.STATUS_DISABLED
        addon_disabled = self.addon.is_disabled
        if file_disabled or addon_disabled:
            return self.guarded_file_path
        else:
            return self.file_path

    @property
    def extension(self):
        """The filename's extension, including the leading dot."""
        return os.path.splitext(self.filename)[-1]

    def move_file(self, source, destination, log_message):
        """Move a file from `source` to `destination`.

        Best-effort: missing sources are skipped and move failures are
        logged rather than raised."""
        log_message = force_text(log_message)
        try:
            if storage.exists(source):
                log.info(
                    log_message.format(source=source, destination=destination))
                move_stored_file(source, destination)
        except (UnicodeEncodeError, IOError):
            msg = u'Move Failure: {} {}'.format(source, destination)
            log.exception(msg)

    def hide_disabled_file(self):
        """Move a disabled file to the guarded file path."""
        if not self.filename:
            return
        src, dst = self.file_path, self.guarded_file_path
        self.move_file(src, dst,
                       'Moving disabled file: {source} => {destination}')

    def unhide_disabled_file(self):
        """Move a re-enabled file back to the regular file path."""
        if not self.filename:
            return
        src, dst = self.guarded_file_path, self.file_path
        self.move_file(src, dst,
                       'Moving undisabled file: {source} => {destination}')

    # Matches "locale browser <locale> <path>" lines in a chrome.manifest.
    _get_localepicker = re.compile(r'^locale browser ([\w\-_]+) (.*)$', re.M)

    @memoize(prefix='localepicker', timeout=None)
    def get_localepicker(self):
        """
        For a file that is part of a language pack, extract
        the chrome/localepicker.properties file and return as
        a string.

        Returns '' on any failure (bad zip, missing manifest/entry).
        """
        start = time.time()

        try:
            zip_ = SafeZip(self.file_path)
        except (zipfile.BadZipfile, IOError):
            return ''

        try:
            manifest = force_text(zip_.read('chrome.manifest'))
        except KeyError:
            log.info('No file named: chrome.manifest in file: %s' % self.pk)
            return ''

        res = self._get_localepicker.search(manifest)
        if not res:
            log.error('Locale browser not in chrome.manifest: %s' % self.pk)
            return ''

        try:
            path = res.groups()[1]
            if 'localepicker.properties' not in path:
                path = os.path.join(path, 'localepicker.properties')
            res = zip_.extract_from_manifest(path)
        except (zipfile.BadZipfile, IOError) as e:
            log.error('Error unzipping: %s, %s in file: %s' %
                      (path, e, self.pk))
            return ''
        except (ValueError, KeyError) as e:
            log.error('No file named: %s in file: %s' % (e, self.pk))
            return ''

        end = time.time() - start
        log.info('Extracted localepicker file: %s in %.2fs' % (self.pk, end))
        statsd.timing('files.extract.localepicker', (end * 1000))
        return force_text(res)

    @cached_property
    def webext_permissions_list(self):
        """Deduplicated list (order preserved) of string permissions declared
        by this webextension; [] for non-webextensions or when no
        WebextPermission record exists."""
        if not self.is_webextension:
            return []
        try:
            # Filter out any errant non-strings included in the manifest JSON.
            # Remove any duplicate permissions.
            permissions = set()
            permissions = [
                p for p in self._webext_permissions.permissions
                if isinstance(p, six.string_types)
                and not (p in permissions or permissions.add(p))
            ]
            return permissions

        except WebextPermission.DoesNotExist:
            return []
# --- Example #15 (scraper artifact marker) ---
class APIKey(ModelBase):
    """
    A developer's key/secret pair to access the API.
    """
    id = PositiveAutoField(primary_key=True)
    user = models.ForeignKey(UserProfile, related_name='api_keys')

    # A user can only have one active key at the same time, it's enforced by
    # a unique db constraint. Since we keep old inactive keys though, nulls
    # need to be allowed (and we need to always set is_active=None instead of
    # is_active=False when revoking keys).
    is_active = models.NullBooleanField(default=True)
    type = models.PositiveIntegerField(choices=dict(
        zip(API_KEY_TYPES, API_KEY_TYPES)).items(),
                                       default=0)
    key = models.CharField(max_length=255, db_index=True, unique=True)
    # TODO: use RSA public keys instead? If we were to use JWT RSA keys
    # then we'd only need to store the public key.
    secret = AESField(aes_key='api_key:secret')

    class Meta:
        db_table = 'api_key'
        unique_together = (('user', 'is_active'), )

    def __str__(self):
        # The secret is deliberately redacted from the representation.
        return (
            u'<{cls} user={user}, type={type}, key={key} secret=...>'.format(
                cls=self.__class__.__name__,
                key=self.key,
                type=self.type,
                user=self.user))

    @classmethod
    def get_jwt_key(cls, **kwargs):
        """
        Return a single active APIKey instance for a given user or key.

        Raises cls.DoesNotExist when no matching active key is found.
        """
        kwargs['is_active'] = True
        return cls.objects.get(type=SYMMETRIC_JWT_TYPE, **kwargs)

    @classmethod
    def new_jwt_credentials(cls, user):
        """
        Generates a new key/secret pair suitable for symmetric JWT signing.

        This method must be run within a db transaction.
        Returns an instance of APIKey.
        """
        key = cls.get_unique_key('user:{}:'.format(user.pk))
        return cls.objects.create(key=key,
                                  secret=cls.generate_secret(32),
                                  type=SYMMETRIC_JWT_TYPE,
                                  user=user,
                                  is_active=True)

    @classmethod
    def get_unique_key(cls, prefix, try_count=1, max_tries=1000):
        """Return `prefix` plus a random suffix not used by any existing key.

        Retries recursively on collision and raises RuntimeError once
        `max_tries` attempts have been made.
        """
        if try_count >= max_tries:
            raise RuntimeError(
                'a unique API key could not be found after {} tries'.format(
                    max_tries))

        # The key is a public identifier (the secret is separate), so a
        # non-cryptographic random suffix is acceptable here; uniqueness is
        # enforced by the existence check below.
        key = '{}{}'.format(prefix, random.randint(0, 999))
        if cls.objects.filter(key=key).exists():
            return cls.get_unique_key(prefix,
                                      try_count=try_count + 1,
                                      max_tries=max_tries)
        return key

    @staticmethod
    def generate_secret(byte_length):
        """
        Return a true random ascii string containing byte_length of randomness.

        The resulting key is suitable for cryptography.
        The key will be hex encoded which means it will be twice as long
        as byte_length, i.e. 40 random bytes yields an 80 byte string.

        byte_length must be at least 32.
        """
        if byte_length < 32:  # at least 256 bit
            # Message fixed: the check above accepts 32, so the minimum is
            # "at least 32 bytes", not "longer than 32 bytes".
            raise ValueError(
                '{} is too short; secrets must be at least 32 bytes'.format(
                    byte_length))
        return force_text(binascii.b2a_hex(os.urandom(byte_length)))
# --- Example #16 (scraper artifact marker) ---
class Version(OnChangeMixin, ModelBase):
    """A single released version of an add-on, with its files, license and
    compatibility information."""
    id = PositiveAutoField(primary_key=True)
    addon = models.ForeignKey('addons.Addon',
                              related_name='versions',
                              on_delete=models.CASCADE)
    license = models.ForeignKey('License',
                                null=True,
                                blank=True,
                                on_delete=models.SET_NULL)
    release_notes = PurifiedField(db_column='releasenotes', short=False)
    approval_notes = models.TextField(db_column='approvalnotes',
                                      default='',
                                      null=True,
                                      blank=True)
    version = VersionStringField(max_length=255, default='0.1')

    # Date the version was nominated for review (see reset_nomination_time /
    # inherit_nomination).
    nomination = models.DateTimeField(null=True)
    reviewed = models.DateTimeField(null=True)

    # Soft-delete flag: deleted versions are kept in the db (see delete()).
    deleted = models.BooleanField(default=False)

    # Optional source code archive submitted alongside the version.
    source = models.FileField(
        upload_to=source_upload_path,
        storage=source_upload_storage,
        null=True,
        blank=True,
        max_length=255,
    )

    channel = models.IntegerField(choices=amo.RELEASE_CHANNEL_CHOICES,
                                  default=amo.RELEASE_CHANNEL_LISTED)

    # NOTE(review): presumably the commit hash from git extraction (see
    # create_git_extraction_entry usage in from_upload) — confirm.
    git_hash = models.CharField(max_length=40, blank=True)

    needs_human_review = models.BooleanField(default=False)

    # The order of those managers is very important: please read the lengthy
    # comment above the Addon managers declaration/instantiation.
    unfiltered = VersionManager(include_deleted=True)
    objects = VersionManager()

    # See UnfilteredVersionManagerForRelations() docstring for usage of this
    # special manager.
    unfiltered_for_relations = UnfilteredVersionManagerForRelations()

    class Meta(ModelBase.Meta):
        db_table = 'versions'
        # This is very important: please read the lengthy comment in Addon.Meta
        # description
        base_manager_name = 'unfiltered'
        ordering = ['-created', '-modified']
        indexes = [
            models.Index(fields=('addon', ), name='addon_id'),
            models.Index(fields=('license', ), name='license_id'),
        ]
        constraints = [
            models.UniqueConstraint(
                fields=('addon', 'version'),
                name='versions_addon_id_version_5a2e75b6_uniq',
            ),
        ]

    def __str__(self):
        """Return the version string, HTML-escaped via markupsafe."""
        return markupsafe.escape(self.version)

    @classmethod
    def from_upload(
        cls,
        upload,
        addon,
        channel,
        *,
        selected_apps=None,
        compatibility=None,
        parsed_data=None,
    ):
        """
        Create a Version instance and corresponding File(s) from a
        FileUpload, an Addon, a channel id and the parsed_data generated by
        parse_addon(). Additionally, for non-themes: either a list of compatible app ids
        needs to be provided as `selected_apps`, or a list of `ApplicationVersions`
        instances for each compatible app as `compatibility`.

        If `compatibility` is provided: the `version` property of the instances will be
        set to the new upload and the instances saved. If the min and/or max properties
        of the `ApplicationVersions` instance are none then `AppVersion`s parsed from
        the manifest, or defaults, are used.

        Note that it's the caller's responsability to ensure the file is valid.
        We can't check for that here because an admin may have overridden the
        validation results.

        Raises VersionCreateError when the add-on or upload fails one of the
        preconditions checked below.
        """
        from olympia.addons.models import AddonReviewerFlags
        from olympia.addons.utils import RestrictionChecker
        from olympia.git.utils import create_git_extraction_entry

        assert parsed_data is not None

        if addon.type == amo.ADDON_STATICTHEME:
            # We don't let developers select apps for static themes
            compatibility = {
                app: (compatibility
                      or {}).get(app, ApplicationsVersions(application=app.id))
                for app in amo.APP_USAGE
            }
        assert selected_apps or compatibility

        if addon.status == amo.STATUS_DISABLED:
            raise VersionCreateError(
                'Addon is Mozilla Disabled; no new versions are allowed.')

        if upload.addon and upload.addon != addon:
            raise VersionCreateError(
                'FileUpload was made for a different Addon')

        if (not getattr(upload, 'user', None) or not upload.ip_address
                or not upload.source):
            raise VersionCreateError(
                'FileUpload does not have some required fields')

        if not upload.user.last_login_ip or not upload.user.email:
            raise VersionCreateError(
                'FileUpload user does not have some required fields')

        # This should be guaranteed by the linter, just raise an explicit
        # exception if somehow it's wrong.
        if not isinstance(parsed_data.get('install_origins', []), list):
            raise VersionCreateError(
                'install_origins was not validated properly')

        # Reuse the previous listed version's license when the manifest didn't
        # specify one.
        license_id = parsed_data.get('license_id')
        if not license_id and channel == amo.RELEASE_CHANNEL_LISTED:
            previous_version = addon.find_latest_version(channel=channel,
                                                         exclude=())
            if previous_version and previous_version.license_id:
                license_id = previous_version.license_id
        approval_notes = None
        if parsed_data.get('is_mozilla_signed_extension'):
            approval_notes = (
                'This version has been signed with Mozilla internal certificate.'
            )
        version = cls.objects.create(
            addon=addon,
            approval_notes=approval_notes,
            version=parsed_data['version'],
            license_id=license_id,
            channel=channel,
            release_notes=parsed_data.get('release_notes'),
        )
        with core.override_remote_addr(upload.ip_address):
            # The following log statement is used by foxsec-pipeline.
            # We override the IP because it might be called from a task and we
            # want the original IP from the submitter.
            log.info(
                f'New version: {version!r} ({version.id}) from {upload!r}',
                extra={
                    'email': upload.user.email,
                    'guid': addon.guid,
                    'upload': upload.uuid.hex,
                    'user_id': upload.user_id,
                    'from_api': upload.source == amo.UPLOAD_SOURCE_SIGNING_API,
                },
            )
            activity.log_create(amo.LOG.ADD_VERSION,
                                version,
                                addon,
                                user=upload.user)

        if not compatibility:
            compatibility = {
                amo.APP_IDS[app_id]: ApplicationsVersions(application=app_id)
                for app_id in selected_apps
            }

        compatible_apps = {}
        for parsed_app in parsed_data.get('apps', []):
            if parsed_app.appdata not in compatibility:
                # If the user chose to explicitly deselect Firefox for Android
                # we're not creating the respective `ApplicationsVersions`
                # which will have this add-on then be listed only for
                # Firefox specifically.
                continue
            avs = compatibility[parsed_app.appdata]
            avs.version = version
            # Keep any min/max already set on the provided instance, falling
            # back to the values parsed from the manifest.
            avs.min = getattr(avs, 'min', parsed_app.min)
            avs.max = getattr(avs, 'max', parsed_app.max)
            avs.save()
            compatible_apps[parsed_app.appdata] = avs

        # Pre-generate compatible_apps property to avoid accidentally
        # triggering queries with that instance later.
        version.compatible_apps = compatible_apps

        # Record declared install origins. base_domain is set automatically.
        if waffle.switch_is_active('record-install-origins'):
            for origin in set(parsed_data.get('install_origins', [])):
                version.installorigin_set.create(origin=origin)

        # Create relevant file.
        File.from_upload(
            upload=upload,
            version=version,
            parsed_data=parsed_data,
        )

        version.inherit_nomination(from_statuses=[amo.STATUS_AWAITING_REVIEW])
        version.disable_old_files()

        # After the upload has been copied to its permanent location, delete it
        # from storage. Keep the FileUpload instance (it gets cleaned up by a
        # cron eventually some time after its creation, in amo.cron.gc()),
        # making sure it's associated with the add-on instance.
        storage.delete(upload.path)
        upload.path = ''
        if upload.addon is None:
            upload.addon = addon
        upload.save()

        version_uploaded.send(instance=version, sender=Version)

        # Attach scanner results from the upload to the new version.
        if (waffle.switch_is_active('enable-yara')
                or waffle.switch_is_active('enable-customs')
                or waffle.switch_is_active('enable-wat')):
            ScannerResult.objects.filter(upload_id=upload.id).update(
                version=version)

        if waffle.switch_is_active('enable-uploads-commit-to-git-storage'):
            # Schedule this version for git extraction.
            transaction.on_commit(
                lambda: create_git_extraction_entry(version=version))

        # Generate a preview and icon for listed static themes
        if (addon.type == amo.ADDON_STATICTHEME
                and channel == amo.RELEASE_CHANNEL_LISTED):
            theme_data = parsed_data.get('theme', {})
            generate_static_theme_preview(theme_data, version.pk)

        # Reset add-on reviewer flags to disable auto-approval and require
        # admin code review if the package has already been signed by mozilla.
        reviewer_flags_defaults = {}
        is_mozilla_signed = parsed_data.get('is_mozilla_signed_extension')
        if upload.validation_timeout:
            reviewer_flags_defaults['needs_admin_code_review'] = True
        if is_mozilla_signed and addon.type != amo.ADDON_LPAPP:
            reviewer_flags_defaults['needs_admin_code_review'] = True
            reviewer_flags_defaults['auto_approval_disabled'] = True

        # Check if the approval should be restricted
        if not RestrictionChecker(upload=upload).is_auto_approval_allowed():
            flag = ('auto_approval_disabled'
                    if channel == amo.RELEASE_CHANNEL_LISTED else
                    'auto_approval_disabled_unlisted')
            reviewer_flags_defaults[flag] = True

        if reviewer_flags_defaults:
            AddonReviewerFlags.objects.update_or_create(
                addon=addon, defaults=reviewer_flags_defaults)

        # Authors need to be notified about auto-approval delay again since
        # they are submitting a new version.
        addon.reset_notified_about_auto_approval_delay()

        # Track the time it took from first upload through validation
        # (and whatever else) until a version was created.
        upload_start = utc_millesecs_from_epoch(upload.created)
        now = datetime.datetime.now()
        now_ts = utc_millesecs_from_epoch(now)
        upload_time = now_ts - upload_start

        log.info('Time for version {version} creation from upload: {delta}; '
                 'created={created}; now={now}'.format(delta=upload_time,
                                                       version=version,
                                                       created=upload.created,
                                                       now=now))
        statsd.timing('devhub.version_created_from_upload', upload_time)

        return version

    def license_url(self, impala=False):
        """Return the URL of the license page for this version.

        NOTE(review): `impala` is unused in this body; kept for
        backwards-compatibility with existing callers.
        """
        return reverse('addons.license', args=[self.addon.slug, self.version])

    def get_url_path(self):
        """Return the add-on's versions page URL, or '' for unlisted
        versions (which have no public page)."""
        if self.channel != amo.RELEASE_CHANNEL_UNLISTED:
            return reverse('addons.versions', args=[self.addon.slug])
        return ''

    def delete(self, hard=False):
        """Delete this version: soft-delete by default (row kept, `deleted`
        flag set and file disabled), or remove the row when `hard=True`."""
        # To avoid a circular import
        from .tasks import delete_preview_files

        log.info(f'Version deleted: {self!r} ({self.id})')
        activity.log_create(amo.LOG.DELETE_VERSION, self.addon,
                            str(self.version))

        if hard:
            super().delete()
        else:
            # By default we soft delete so we can keep the files for comparison
            # and a record of the version number.
            if hasattr(self, 'file'):
                # .file should always exist but we don't want to break delete regardless
                self.file.update(status=amo.STATUS_DISABLED)
            self.deleted = True
            self.save()

            # Clear pending rejection flag (we have the activity log for
            # records purposes, the flag serves no purpose anymore if the
            # version is deleted).
            VersionReviewerFlags.objects.filter(version=self).update(
                pending_rejection=None, pending_rejection_by=None)

            # Clean up preview files asynchronously.
            previews_pks = list(
                VersionPreview.objects.filter(version__id=self.id).values_list(
                    'id', flat=True))

            for preview_pk in previews_pks:
                delete_preview_files.delay(preview_pk)

    @property
    def is_user_disabled(self):
        """True when the file was disabled by its developer rather than by
        Mozilla: it is disabled and its pre-disable status was recorded."""
        file_ = self.file
        return (file_.status == amo.STATUS_DISABLED
                and file_.original_status != amo.STATUS_NULL)

    @is_user_disabled.setter
    def is_user_disabled(self, disable):
        # User wants to disable (and the File isn't already).
        if disable:
            activity.log_create(amo.LOG.DISABLE_VERSION, self.addon, self)
            if (file_ := self.file) and file_.status != amo.STATUS_DISABLED:
                file_.update(original_status=file_.status,
                             status=amo.STATUS_DISABLED)
        # User wants to re-enable (and user did the disable, not Mozilla).
        else:
# --- Example #17 (scraper artifact marker) ---
class EmailUserRestriction(RestrictionAbstractBaseModel, NormalizeEmailMixin):
    """Email patterns that are restricted from add-on submission and/or
    auto-approval (depending on `restriction_type`)."""
    id = PositiveAutoField(primary_key=True)
    email_pattern = models.CharField(
        _('Email Pattern'),
        max_length=100,
        help_text=
        _('Enter full email that should be blocked or use unix-style wildcards, '
          'e.g. "*@example.com". If you need to block a domain incl subdomains, '
          'add a second entry, e.g. "*@*.example.com".'),
    )

    error_message = _('The email address used for your account is not '
                      'allowed for add-on submission.')

    class Meta:
        db_table = 'users_user_email_restriction'

    def __str__(self):
        return str(self.email_pattern)

    def save(self, **kw):
        # Only normalize full addresses: wildcard patterns contain no '@'
        # and would be mangled by email normalization.
        if '@' in self.email_pattern:
            self.email_pattern = self.normalize_email(self.email_pattern)
        super().save(**kw)

    @classmethod
    def allow_submission(cls, request):
        """
        Return whether the specified request should be allowed to submit
        add-ons.
        """
        if not request.user.is_authenticated:
            return False

        return cls.allow_email(request.user.email,
                               restriction_type=RESTRICTION_TYPES.SUBMISSION)

    @classmethod
    def allow_auto_approval(cls, upload):
        """Return whether the upload's author may have it auto-approved."""
        if not upload.user:
            return False
        return cls.allow_email(upload.user.email,
                               restriction_type=RESTRICTION_TYPES.APPROVAL)

    @classmethod
    def allow_email(cls, email, *, restriction_type):
        """
        Return whether the specified email should be allowed to submit add-ons.
        """
        email = cls.normalize_email(email)
        # Use cls.objects for consistency with the other classmethods; the
        # previous hard-coded class name plus redundant `.all()` did the same
        # query less idiomatically.
        restrictions = cls.objects.filter(restriction_type=restriction_type)

        for restriction in restrictions:
            if fnmatchcase(email, restriction.email_pattern):
                # The following log statement is used by foxsec-pipeline.
                log.info(
                    'Restricting request from %s %s (%s)',
                    'email',
                    email,
                    'email_pattern=%s' % restriction.email_pattern,
                )
                return False

        return True
# --- Example 18 ---
class ReviewerScore(ModelBase):
    """Points awarded to reviewers for review and moderation actions."""
    id = PositiveAutoField(primary_key=True)
    user = models.ForeignKey(UserProfile, related_name='_reviewer_scores')
    addon = models.ForeignKey(Addon, blank=True, null=True, related_name='+')
    version = models.ForeignKey(Version,
                                blank=True,
                                null=True,
                                related_name='+')
    score = models.IntegerField()
    # For automated point rewards.
    note_key = models.SmallIntegerField(choices=amo.REVIEWED_CHOICES.items(),
                                        default=0)
    # For manual point rewards with a note.
    note = models.CharField(max_length=255)

    class Meta:
        db_table = 'reviewer_scores'
        ordering = ('-created', )

    @classmethod
    def get_key(cls, key=None, invalidate=False):
        """Return a namespaced cache key for `key`, or invalidate the whole
        namespace when called without one."""
        namespace = 'riscore'
        if not key:  # Assuming we're invalidating the namespace.
            cache_ns_key(namespace, invalidate)
            return
        else:
            # Using cache_ns_key so each cache val is invalidated together.
            ns_key = cache_ns_key(namespace, invalidate)
            return '%s:%s' % (ns_key, key)

    @classmethod
    def get_event(cls,
                  addon,
                  status,
                  version=None,
                  post_review=False,
                  content_review=False):
        """Return the review event type constant.

        This is determined by the addon.type and the queue the addon is
        currently in (which is determined from the various parameters sent
        down from award_points()).

        Note: We're not using addon.status or addon.current_version because
        this is called after the status/current_version might have been updated
        by the reviewer action.

        """
        reviewed_score_name = None
        if content_review:
            # Content review always gives the same amount of points.
            reviewed_score_name = 'REVIEWED_CONTENT_REVIEW'
        elif post_review:
            # There are 4 tiers of post-review scores depending on the addon
            # weight.
            try:
                if version is None:
                    raise AutoApprovalSummary.DoesNotExist
                weight = version.autoapprovalsummary.weight
            except AutoApprovalSummary.DoesNotExist as exception:
                log.exception(
                    'No such version/auto approval summary when determining '
                    'event type to award points: %r', exception)
                weight = 0

            if addon.type == amo.ADDON_DICT:
                reviewed_score_name = 'REVIEWED_DICT_FULL'
            elif addon.type in [amo.ADDON_LPAPP, amo.ADDON_LPADDON]:
                reviewed_score_name = 'REVIEWED_LP_FULL'
            elif addon.type == amo.ADDON_SEARCH:
                reviewed_score_name = 'REVIEWED_SEARCH_FULL'
            elif weight > amo.POST_REVIEW_WEIGHT_HIGHEST_RISK:
                reviewed_score_name = 'REVIEWED_EXTENSION_HIGHEST_RISK'
            elif weight > amo.POST_REVIEW_WEIGHT_HIGH_RISK:
                reviewed_score_name = 'REVIEWED_EXTENSION_HIGH_RISK'
            elif weight > amo.POST_REVIEW_WEIGHT_MEDIUM_RISK:
                reviewed_score_name = 'REVIEWED_EXTENSION_MEDIUM_RISK'
            else:
                reviewed_score_name = 'REVIEWED_EXTENSION_LOW_RISK'
        else:
            if status == amo.STATUS_NOMINATED:
                queue = 'FULL'
            elif status == amo.STATUS_PUBLIC:
                queue = 'UPDATE'
            else:
                queue = ''

            if (addon.type
                    in [amo.ADDON_EXTENSION, amo.ADDON_PLUGIN, amo.ADDON_API]
                    and queue):
                reviewed_score_name = 'REVIEWED_ADDON_%s' % queue
            elif addon.type == amo.ADDON_DICT and queue:
                reviewed_score_name = 'REVIEWED_DICT_%s' % queue
            elif addon.type in [amo.ADDON_LPAPP, amo.ADDON_LPADDON] and queue:
                reviewed_score_name = 'REVIEWED_LP_%s' % queue
            elif addon.type == amo.ADDON_PERSONA:
                reviewed_score_name = 'REVIEWED_PERSONA'
            elif addon.type == amo.ADDON_STATICTHEME:
                reviewed_score_name = 'REVIEWED_STATICTHEME'
            elif addon.type == amo.ADDON_SEARCH and queue:
                reviewed_score_name = 'REVIEWED_SEARCH_%s' % queue
            elif addon.type == amo.ADDON_THEME and queue:
                reviewed_score_name = 'REVIEWED_XUL_THEME_%s' % queue

        if reviewed_score_name:
            return getattr(amo, reviewed_score_name)
        return None

    @classmethod
    def award_points(cls,
                     user,
                     addon,
                     status,
                     version=None,
                     post_review=False,
                     content_review=False,
                     extra_note=''):
        """Awards points to user based on an event and the queue.

        `event` is one of the `REVIEWED_` keys in constants.
        `status` is one of the `STATUS_` keys in constants.
        `version` is the `Version` object that was affected by the review.
        `post_review` is set to True if the add-on was auto-approved and the
                      reviewer is confirming/rejecting post-approval.
        `content_review` is set to True if it's a content-only review of an
                         auto-approved add-on.

        Returns the awarded score (possibly None when no event matched).
        """

        # If a webextension file gets approved manually (e.g. because
        # auto-approval is disabled), 'post-review' is set to False, treating
        # the file as a legacy file which is not what we want. The file is
        # still a webextension and should treated as such, regardless of
        # auto-approval being disabled or not.
        # As a hack, we set 'post_review' to True.
        if (version and version.is_webextension
                and addon.type in amo.GROUP_TYPE_ADDON):
            post_review = True

        # Use lazy %-style logging args; the old `(...).encode('utf-8')`
        # was a Python 2 leftover and would log a bytes repr on Python 3.
        user_log.info(
            'Determining award points for user %s for version %s of addon %s',
            user, version, addon.id)

        event = cls.get_event(addon,
                              status,
                              version=version,
                              post_review=post_review,
                              content_review=content_review)
        score = amo.REVIEWED_SCORES.get(event)

        user_log.info(
            'Determined %s award points (event: %s) for user %s for version '
            '%s of addon %s',
            score, event, user, version, addon.id)

        # Add bonus to reviews greater than our limit to encourage fixing
        # old reviews. Does not apply to content-review/post-review at the
        # moment, because it would need to be calculated differently.
        award_overdue_bonus = (version and version.nomination
                               and not post_review and not content_review)
        if award_overdue_bonus:
            waiting_time_days = (datetime.now() - version.nomination).days
            days_over = waiting_time_days - amo.REVIEWED_OVERDUE_LIMIT
            if days_over > 0:
                bonus = days_over * amo.REVIEWED_OVERDUE_BONUS
                score = score + bonus

        if score is not None:
            cls.objects.create(user=user,
                               addon=addon,
                               score=score,
                               note_key=event,
                               note=extra_note,
                               version=version)
            cls.get_key(invalidate=True)
            user_log.info(
                'Awarding %s points to user %s for "%s" for addon %s',
                score, user, amo.REVIEWED_CHOICES[event], addon.id)
        return score

    @classmethod
    def award_moderation_points(cls, user, addon, review_id, undo=False):
        """Awards points to user based on moderated review."""
        event = (amo.REVIEWED_ADDON_REVIEW
                 if not undo else amo.REVIEWED_ADDON_REVIEW_POORLY)
        score = amo.REVIEWED_SCORES.get(event)

        cls.objects.create(user=user, addon=addon, score=score, note_key=event)
        cls.get_key(invalidate=True)
        user_log.info('Awarding %s points to user %s for "%s" for review %s',
                      score, user, amo.REVIEWED_CHOICES[event], review_id)

    @classmethod
    def get_total(cls, user):
        """Returns total points by user."""
        key = cls.get_key('get_total:%s' % user.id)
        val = cache.get(key)
        if val is not None:
            return val

        # Index the aggregate dict by its alias. The previous
        # `(...values())[0]` raised TypeError on Python 3 (dict views are
        # not subscriptable) and relied on dict ordering anyway.
        val = (ReviewerScore.objects.filter(user=user)
               .aggregate(total=Sum('score'))['total'])
        if val is None:
            val = 0

        cache.set(key, val, None)
        return val

    @classmethod
    def get_recent(cls, user, limit=5, addon_type=None):
        """Returns most recent ReviewerScore records."""
        key = cls.get_key('get_recent:%s' % user.id)
        val = cache.get(key)
        if val is not None:
            return val

        val = ReviewerScore.objects.filter(user=user)
        if addon_type is not None:
            # QuerySets are immutable: the previous code discarded the
            # filtered queryset, silently ignoring addon_type.
            val = val.filter(addon__type=addon_type)

        val = list(val[:limit])
        cache.set(key, val, None)
        return val

    @classmethod
    def get_breakdown(cls, user):
        """Returns points broken down by addon type."""
        key = cls.get_key('get_breakdown:%s' % user.id)
        val = cache.get(key)
        if val is not None:
            return val

        sql = """
             SELECT `reviewer_scores`.*,
                    SUM(`reviewer_scores`.`score`) AS `total`,
                    `addons`.`addontype_id` AS `atype`
             FROM `reviewer_scores`
             LEFT JOIN `addons` ON (`reviewer_scores`.`addon_id`=`addons`.`id`)
             WHERE `reviewer_scores`.`user_id` = %s
             GROUP BY `addons`.`addontype_id`
             ORDER BY `total` DESC
        """
        val = list(ReviewerScore.objects.raw(sql, [user.id]))
        cache.set(key, val, None)
        return val

    @classmethod
    def get_breakdown_since(cls, user, since):
        """
        Returns points broken down by addon type since the given datetime.
        """
        key = cls.get_key('get_breakdown:%s:%s' % (user.id, since.isoformat()))
        val = cache.get(key)
        if val is not None:
            return val

        sql = """
             SELECT `reviewer_scores`.*,
                    SUM(`reviewer_scores`.`score`) AS `total`,
                    `addons`.`addontype_id` AS `atype`
             FROM `reviewer_scores`
             LEFT JOIN `addons` ON (`reviewer_scores`.`addon_id`=`addons`.`id`)
             WHERE `reviewer_scores`.`user_id` = %s AND
                   `reviewer_scores`.`created` >= %s
             GROUP BY `addons`.`addontype_id`
             ORDER BY `total` DESC
        """
        val = list(ReviewerScore.objects.raw(sql, [user.id, since]))
        cache.set(key, val, 3600)
        return val

    @classmethod
    def _leaderboard_list(cls, since=None, types=None, addon_type=None):
        """
        Returns base leaderboard list. Each item will be a tuple containing
        (user_id, name, total).
        """

        reviewers = (UserProfile.objects.filter(
            groups__name__startswith='Reviewers: ').exclude(
                groups__name__in=('Staff', 'Admins',
                                  'No Reviewer Incentives')).distinct())
        qs = (cls.objects.values_list('user__id').filter(
            user__in=reviewers).annotate(
                total=Sum('score')).order_by('-total'))

        if since is not None:
            qs = qs.filter(created__gte=since)

        if types is not None:
            qs = qs.filter(note_key__in=types)

        if addon_type is not None:
            qs = qs.filter(addon__type=addon_type)

        users = {reviewer.pk: reviewer for reviewer in reviewers}
        return [(item[0], users.get(item[0], UserProfile()).name, item[1])
                for item in qs]

    @classmethod
    def get_leaderboards(cls, user, days=7, types=None, addon_type=None):
        """Returns leaderboards with ranking for the past given days.

        This will return a dict of 3 items::

            {'leader_top': [...],
             'leader_near: [...],
             'user_rank': (int)}

        If the user is not in the leaderboard, or if the user is in the top 5,
        'leader_near' will be an empty list and 'leader_top' will contain 5
        elements instead of the normal 3.

        """
        key = cls.get_key('get_leaderboards:%s' % user.id)
        val = cache.get(key)
        if val is not None:
            return val

        week_ago = date.today() - timedelta(days=days)

        leader_top = []
        leader_near = []

        leaderboard = cls._leaderboard_list(since=week_ago,
                                            types=types,
                                            addon_type=addon_type)

        scores = []

        user_rank = 0
        in_leaderboard = False
        for rank, row in enumerate(leaderboard, 1):
            user_id, name, total = row
            scores.append({
                'user_id': user_id,
                'name': name,
                'rank': rank,
                'total': int(total),
            })
            if user_id == user.id:
                user_rank = rank
                in_leaderboard = True

        if not in_leaderboard:
            leader_top = scores[:5]
        else:
            if user_rank <= 5:  # User is in top 5, show top 5.
                leader_top = scores[:5]
            else:
                leader_top = scores[:3]
                leader_near = [scores[user_rank - 2], scores[user_rank - 1]]
                try:
                    leader_near.append(scores[user_rank])
                except IndexError:
                    pass  # User is last on the leaderboard.

        val = {
            'leader_top': leader_top,
            'leader_near': leader_near,
            'user_rank': user_rank,
        }
        cache.set(key, val, None)
        return val

    @classmethod
    def all_users_by_score(cls):
        """
        Returns reviewers ordered by highest total points first.
        """
        leaderboard = cls._leaderboard_list()
        scores = []

        for row in leaderboard:
            user_id, name, total = row
            user_level = len(amo.REVIEWED_LEVELS) - 1
            for i, level in enumerate(amo.REVIEWED_LEVELS):
                if total < level['points']:
                    user_level = i - 1
                    break

            # Only show level if it changes. `unicode()` was a Python 2
            # builtin and raises NameError on Python 3; use str().
            if user_level < 0:
                level = ''
            else:
                level = str(amo.REVIEWED_LEVELS[user_level]['name'])

            scores.append({
                'user_id': user_id,
                'name': name,
                'total': int(total),
                'level': level,
            })

        prev = None
        for score in reversed(scores):
            if score['level'] == prev:
                score['level'] = ''
            else:
                prev = score['level']

        return scores
# --- Example 19 ---
class Collection(ModelBase):
    """A user-curated, optionally listed collection of add-ons.

    Add-ons are attached via the CollectionAddon through-model; the
    `addon_count` denormalization is refreshed by the `collection_meta`
    task queued from `post_save`.
    """
    id = PositiveAutoField(primary_key=True)
    TYPE_CHOICES = amo.COLLECTION_CHOICES.items()

    uuid = models.UUIDField(blank=True, unique=True, null=True)
    name = TranslatedField(require_locale=False)
    # nickname is deprecated.  Use slug.
    nickname = models.CharField(max_length=30, blank=True, unique=True,
                                null=True)
    slug = models.CharField(max_length=30, blank=True, null=True)

    description = NoLinksNoMarkupField(require_locale=False)
    default_locale = models.CharField(max_length=10, default='en-US',
                                      db_column='defaultlocale')
    type = models.PositiveIntegerField(db_column='collection_type',
                                       choices=TYPE_CHOICES, default=0)

    listed = models.BooleanField(
        default=True, help_text='Collections are either listed or private.')

    application = models.PositiveIntegerField(choices=amo.APPS_CHOICES,
                                              db_column='application_id',
                                              blank=True, null=True)
    addon_count = models.PositiveIntegerField(default=0,
                                              db_column='addonCount')

    addons = models.ManyToManyField(
        Addon, through='CollectionAddon', related_name='collections')
    author = models.ForeignKey(
        UserProfile, null=True, related_name='collections',
        on_delete=models.CASCADE)

    objects = CollectionManager()

    class Meta(ModelBase.Meta):
        db_table = 'collections'
        indexes = [
            models.Index(fields=('application',), name='application_id'),
            models.Index(fields=('created',), name='created_idx'),
            models.Index(fields=('listed',), name='listed'),
            models.Index(fields=('slug',), name='slug_idx'),
            models.Index(fields=('type',), name='type_idx'),
        ]
        constraints = [
            models.UniqueConstraint(fields=('author', 'slug'),
                                    name='author_id'),
        ]

    def __str__(self):
        return u'%s (%s)' % (self.name, self.addon_count)

    def save(self, **kw):
        """Ensure a uuid and a unique, cleaned slug exist before saving."""
        if not self.uuid:
            self.uuid = uuid.uuid4()
        if not self.slug:
            # Work with both, strings (if passed manually on .create()
            # and UUID instances)
            self.slug = str(self.uuid).replace('-', '')[:30]
        self.clean_slug()

        super(Collection, self).save(**kw)

    def clean_slug(self):
        """Force reserved slugs for special types and de-duplicate the slug
        among this author's other collections."""
        if self.type in SPECIAL_SLUGS:
            self.slug = SPECIAL_SLUGS[self.type]
            return

        if self.slug in SPECIAL_SLUGS.values():
            # Avoid colliding with a reserved slug.
            self.slug += '~'

        if not self.author:
            return

        # Read from the default (write) database to avoid replication lag.
        qs = self.author.collections.using('default')
        slugs = dict((slug, id) for slug, id in qs.values_list('slug', 'id'))
        if self.slug in slugs and slugs[self.slug] != self.id:
            for idx in range(len(slugs)):
                new = '%s-%s' % (self.slug, idx + 1)
                if new not in slugs:
                    self.slug = new
                    return

    def get_url_path(self):
        """Relative URL to the collection detail page."""
        return reverse('collections.detail',
                       args=[self.author_id, self.slug])

    def get_abs_url(self):
        """Absolute URL to the collection detail page."""
        return absolutify(self.get_url_path())

    @classmethod
    def get_fallback(cls):
        # Fallback locale field used by the translations machinery.
        return cls._meta.get_field('default_locale')

    def add_addon(self, addon):
        """Attach `addon` to this collection (no-op if already present)."""
        CollectionAddon.objects.get_or_create(addon=addon, collection=self)

    def remove_addon(self, addon):
        """Detach `addon` from this collection."""
        CollectionAddon.objects.filter(addon=addon, collection=self).delete()

    def owned_by(self, user):
        """Whether `user` is the author of this collection."""
        return user.id == self.author_id

    def can_view_stats(self, request):
        """Whether the request's user may see this collection's stats
        (owner, or holder of the stats-view permission)."""
        if request and request.user:
            return (self.owned_by(request.user) or
                    acl.action_allowed(request,
                                       amo.permissions.COLLECTION_STATS_VIEW))
        return False

    def is_public(self):
        # "public" currently just means listed.
        return self.listed

    @staticmethod
    def transformer(collections):
        """Queryset transformer: attach author objects in one query
        instead of one query per collection."""
        if not collections:
            return
        author_ids = set(c.author_id for c in collections)
        authors = dict((u.id, u) for u in
                       UserProfile.objects.filter(id__in=author_ids))
        for c in collections:
            c.author = authors.get(c.author_id)

    @staticmethod
    def post_save(sender, instance, **kwargs):
        """Signal handler: refresh denormalized metadata asynchronously.
        Skipped for raw (fixture-loading) saves."""
        from . import tasks
        if kwargs.get('raw'):
            return
        tasks.collection_meta.delay(instance.id)

    def index_addons(self, addons=None):
        """Index add-ons belonging to that collection."""
        from olympia.addons.tasks import index_addons
        addon_ids = [addon.id for addon in (addons or self.addons.all())]
        if addon_ids:
            index_addons.delay(addon_ids)

    def check_ownership(self, request, require_owner, require_author,
                        ignore_disabled, admin):
        """
        Used by acl.check_ownership to see if request.user has permissions for
        the collection.
        """
        from olympia.access import acl
        return acl.check_collection_ownership(request, self, require_owner)
# --- Example 20 ---
class Collection(ModelBase):
    """A user-curated, optionally listed collection of add-ons.

    Add-ons are attached via the CollectionAddon through-model; featured
    status and the `addon_count` denormalization are maintained through the
    post_save/post_delete handlers below.
    """
    id = PositiveAutoField(primary_key=True)
    TYPE_CHOICES = amo.COLLECTION_CHOICES.items()

    uuid = models.UUIDField(blank=True, unique=True, null=True)
    name = TranslatedField(require_locale=False)
    # nickname is deprecated.  Use slug.
    nickname = models.CharField(max_length=30,
                                blank=True,
                                unique=True,
                                null=True)
    slug = models.CharField(max_length=30, blank=True, null=True)

    description = NoLinksNoMarkupField(require_locale=False)
    default_locale = models.CharField(max_length=10,
                                      default='en-US',
                                      db_column='defaultlocale')
    type = models.PositiveIntegerField(db_column='collection_type',
                                       choices=TYPE_CHOICES,
                                       default=0)

    listed = models.BooleanField(
        default=True, help_text='Collections are either listed or private.')

    application = models.PositiveIntegerField(choices=amo.APPS_CHOICES,
                                              db_column='application_id',
                                              blank=True,
                                              null=True,
                                              db_index=True)
    addon_count = models.PositiveIntegerField(default=0,
                                              db_column='addonCount')

    all_personas = models.BooleanField(
        default=False, help_text='Does this collection only contain Themes?')

    addons = models.ManyToManyField(Addon,
                                    through='CollectionAddon',
                                    related_name='collections')
    author = models.ForeignKey(UserProfile,
                               null=True,
                               related_name='collections')

    objects = CollectionManager()

    class Meta(ModelBase.Meta):
        db_table = 'collections'
        unique_together = (('author', 'slug'), )

    def __str__(self):
        return u'%s (%s)' % (self.name, self.addon_count)

    def save(self, **kw):
        """Ensure a uuid and a unique, cleaned slug exist before saving."""
        if not self.uuid:
            self.uuid = uuid.uuid4()
        if not self.slug:
            # Work with both, strings (if passed manually on .create()
            # and UUID instances)
            self.slug = str(self.uuid).replace('-', '')[:30]
        self.clean_slug()

        super(Collection, self).save(**kw)

    def clean_slug(self):
        """Force reserved slugs for special types and de-duplicate the slug
        among this author's other collections."""
        if self.type in SPECIAL_SLUGS:
            self.slug = SPECIAL_SLUGS[self.type]
            return

        if self.slug in SPECIAL_SLUGS.values():
            self.slug += '~'

        if not self.author:
            return

        # Read from the default (write) database to avoid replication lag.
        qs = self.author.collections.using('default')
        slugs = dict((slug, id) for slug, id in qs.values_list('slug', 'id'))
        if self.slug in slugs and slugs[self.slug] != self.id:
            for idx in range(len(slugs)):
                new = '%s-%s' % (self.slug, idx + 1)
                if new not in slugs:
                    self.slug = new
                    return

    def get_url_path(self):
        """Relative URL to the collection detail page."""
        return reverse('collections.detail', args=[self.author_id, self.slug])

    def get_abs_url(self):
        """Absolute URL to the collection detail page."""
        return absolutify(self.get_url_path())

    def edit_url(self):
        return reverse('collections.edit', args=[self.author_id, self.slug])

    def delete_url(self):
        return reverse('collections.delete', args=[self.author_id, self.slug])

    def share_url(self):
        return reverse('collections.share', args=[self.author_id, self.slug])

    def stats_url(self):
        return reverse('collections.stats', args=[self.author_id, self.slug])

    @classmethod
    def get_fallback(cls):
        # Fallback locale field used by the translations machinery.
        return cls._meta.get_field('default_locale')

    def set_addons(self, addon_ids, comments=None):
        """Replace the current add-ons with a new list of add-on ids."""
        if comments is None:
            comments = {}
        order = {a: idx for idx, a in enumerate(addon_ids)}

        # Partition addon_ids into add/update/remove buckets.
        existing = set(
            self.addons.using('default').values_list('id', flat=True))
        add, update = [], []
        for addon in addon_ids:
            bucket = update if addon in existing else add
            bucket.append((addon, order[addon]))
        remove = existing.difference(addon_ids)
        now = datetime.now()

        with connection.cursor() as cursor:
            # NOTE: the values interpolated into the SQL below are numeric
            # ids (from the database and addon_ids), not free-form strings.
            if remove:
                cursor.execute("DELETE FROM addons_collections "
                               "WHERE collection_id=%s AND addon_id IN (%s)" %
                               (self.id, ','.join(map(str, remove))))
                if self.listed:
                    for addon in remove:
                        activity.log_create(amo.LOG.REMOVE_FROM_COLLECTION,
                                            (Addon, addon), self)
            if add:
                insert = '(%s, %s, %s, NOW(), NOW())'
                values = [insert % (a, self.id, idx) for a, idx in add]
                cursor.execute("""
                    INSERT INTO addons_collections
                        (addon_id, collection_id, ordering, created, modified)
                    VALUES %s""" % ','.join(values))
                if self.listed:
                    for addon_id, idx in add:
                        activity.log_create(amo.LOG.ADD_TO_COLLECTION,
                                            (Addon, addon_id), self)
        for addon, ordering in update:
            (CollectionAddon.objects.filter(
                collection=self.id, addon=addon).update(ordering=ordering,
                                                        modified=now))

        # `six.iteritems()` was a Python 2 compatibility leftover; the file
        # already uses Python 3-only syntax (f-strings), so plain .items()
        # is correct and removes the six dependency from this method.
        for addon, comment in comments.items():
            try:
                c = (CollectionAddon.objects.using('default').get(
                    collection=self.id, addon=addon))
            except CollectionAddon.DoesNotExist:
                pass
            else:
                c.comments = comment
                c.save(force_update=True)

        self.save()

    def add_addon(self, addon):
        """Adds an addon to the collection."""
        CollectionAddon.objects.get_or_create(addon=addon, collection=self)
        if self.listed:
            activity.log_create(amo.LOG.ADD_TO_COLLECTION, addon, self)
        self.save()  # To invalidate Collection.

    def remove_addon(self, addon):
        """Removes an addon from the collection."""
        CollectionAddon.objects.filter(addon=addon, collection=self).delete()
        if self.listed:
            activity.log_create(amo.LOG.REMOVE_FROM_COLLECTION, addon, self)
        self.save()  # To invalidate Collection.

    def owned_by(self, user):
        """Whether `user` is the author of this collection."""
        return user.id == self.author_id

    def can_view_stats(self, request):
        """Whether the request's user may see this collection's stats
        (owner, or holder of the stats-view permission)."""
        if request and request.user:
            return (self.owned_by(request.user) or acl.action_allowed(
                request, amo.permissions.COLLECTION_STATS_VIEW))
        return False

    def is_public(self):
        return self.listed

    def is_featured(self):
        """Whether this collection backs a FeaturedCollection entry."""
        return FeaturedCollection.objects.filter(collection=self).exists()

    @staticmethod
    def transformer(collections):
        """Queryset transformer: attach author objects in one query
        instead of one query per collection."""
        if not collections:
            return
        author_ids = set(c.author_id for c in collections)
        authors = dict(
            (u.id, u) for u in UserProfile.objects.filter(id__in=author_ids))
        for c in collections:
            c.author = authors.get(c.author_id)

    @staticmethod
    def post_save(sender, instance, **kwargs):
        """Signal handler: refresh metadata and, for featured collections,
        reindex affected add-ons. Skipped for raw (fixture) saves."""
        from . import tasks
        if kwargs.get('raw'):
            return
        tasks.collection_meta.delay(instance.id)
        if instance.is_featured():
            Collection.update_featured_status(sender, instance, **kwargs)

    @staticmethod
    def post_delete(sender, instance, **kwargs):
        """Signal handler: reindex add-ons of a deleted featured collection."""
        if kwargs.get('raw'):
            return
        if instance.is_featured():
            Collection.update_featured_status(sender, instance, **kwargs)

    @staticmethod
    def update_featured_status(sender, instance, **kwargs):
        """Invalidate the featured-ids cache and reindex the affected
        add-ons (either those passed in `kwargs['addons']` or all of the
        collection's add-ons)."""
        from olympia.addons.tasks import index_addons
        addons = kwargs.get('addons',
                            [addon.id for addon in instance.addons.all()])
        if addons:
            clear_get_featured_ids_cache(None, None)
            index_addons.delay(addons)

    def check_ownership(self, request, require_owner, require_author,
                        ignore_disabled, admin):
        """
        Used by acl.check_ownership to see if request.user has permissions for
        the collection.
        """
        from olympia.access import acl
        return acl.check_collection_ownership(request, self, require_owner)
# --- Example 21 ---
class ValidationJob(ModelBase):
    """A bulk validation job run against a target application version.

    Individual outcomes live in the related `result_set`; the helpers below
    slice it into completed / passing / failing / errored subsets.
    """

    id = PositiveAutoField(primary_key=True)
    application = models.PositiveIntegerField(choices=amo.APPS_CHOICES,
                                              db_column='application_id')
    curr_max_version = models.ForeignKey(AppVersion,
                                         related_name='validation_current_set')
    target_version = models.ForeignKey(AppVersion,
                                       related_name='validation_target_set')
    finish_email = models.EmailField(null=True, max_length=75)
    completed = models.DateTimeField(null=True, db_index=True)
    creator = models.ForeignKey('users.UserProfile', null=True)

    class Meta:
        db_table = 'validation_job'

    def result_completed(self):
        """Results that have finished running."""
        return self.result_set.exclude(completed=None)

    def result_passing(self):
        """Finished results with zero errors and no task failure."""
        return self.result_completed().filter(errors=0, task_error=None)

    def result_failing(self):
        """Finished results that reported at least one error."""
        return self.result_completed().filter(errors__gt=0)

    def result_errors(self):
        """Results whose validation task itself failed."""
        return self.result_set.exclude(task_error=None)

    def is_complete(self, as_int=False):
        """Whether the job has finished, as bool (or 1/0 when `as_int`)."""
        done = self.completed is not None
        return (1 if done else 0) if as_int else done

    @property
    def stats(self):
        """Summary counts for this job, computed once and cached."""
        try:
            return self._stats
        except AttributeError:
            self._stats = self._count_stats()
            return self._stats

    def _count_stats(self):
        # Aggregate the result_set into a plain dict of counts.
        total = self.result_set.count()
        completed = self.result_completed().count()
        percent = (
            Decimal(completed) / Decimal(total) * Decimal(100)
            if (total and completed) else 0)
        return {
            'job_id': self.pk,
            'total': total,
            'completed': completed,
            'completed_timestamp': str(self.completed or ''),
            'passing': self.result_passing().count(),
            'failing': self.result_failing().count(),
            'errors': self.result_errors().count(),
            'percent_complete': percent,
        }
# --- Example 22 ---
class Collection(ModelBase):
    """A user-curated set of add-ons, either listed (public) or private."""

    id = PositiveAutoField(primary_key=True)

    uuid = models.UUIDField(blank=True, unique=True, null=True)
    name = TranslatedField(require_locale=False)
    slug = models.CharField(max_length=30, blank=True, null=True)

    # description can (and sometimes does) contain html and other unsanitized
    # content. It must be cleaned before display - NoURLsField just strips the
    # URL without doing any escaping.
    description = NoURLsField(require_locale=False)
    default_locale = models.CharField(
        max_length=10, default='en-US', db_column='defaultlocale'
    )
    listed = models.BooleanField(
        default=True, help_text='Collections are either listed or private.'
    )

    # Denormalized count, refreshed by the collection_meta task scheduled
    # from post_save() below.
    addon_count = models.PositiveIntegerField(default=0, db_column='addonCount')

    addons = models.ManyToManyField(
        Addon, through='CollectionAddon', related_name='collections'
    )
    author = models.ForeignKey(
        UserProfile, null=True, related_name='collections', on_delete=models.CASCADE
    )

    objects = CollectionManager()

    class Meta(ModelBase.Meta):
        db_table = 'collections'
        indexes = [
            models.Index(fields=('created',), name='collections_created_idx'),
            models.Index(fields=('listed',), name='collections_listed_idx'),
            models.Index(fields=('slug',), name='collections_slug_idx'),
        ]
        constraints = [
            models.UniqueConstraint(fields=('author', 'slug'), name='author_id'),
        ]

    def __str__(self):
        return f'{self.name} ({self.addon_count})'

    def save(self, **kw):
        """Ensure a uuid and a unique-per-author slug exist before saving."""
        if not self.uuid:
            self.uuid = uuid.uuid4()
        if not self.slug:
            # Work with both, strings (if passed manually on .create()
            # and UUID instances)
            self.slug = str(self.uuid).replace('-', '')[:30]
        self.clean_slug()

        super().save(**kw)

    def clean_slug(self):
        """De-duplicate self.slug among the author's collections by
        appending '-<n>' when it clashes with a different collection."""
        if not self.author:
            return

        # Reads via the 'default' database alias - presumably to avoid
        # stale reads from a replica right before saving; confirm.
        qs = self.author.collections.using('default')
        slugs = {slug: id for slug, id in qs.values_list('slug', 'id')}
        if self.slug in slugs and slugs[self.slug] != self.id:
            # There are len(slugs) candidate suffixed slugs but at most
            # len(slugs) - 1 entries they could clash with (self.slug itself
            # cannot equal any candidate), so one candidate is always free.
            for idx in range(len(slugs)):
                new = f'{self.slug}-{idx + 1}'
                if new not in slugs:
                    self.slug = new
                    return

    def get_url_path(self):
        """URL of this collection's detail page."""
        return reverse('collections.detail', args=[self.author_id, self.slug])

    @classmethod
    def get_fallback(cls):
        # Returns the field holding the fallback locale - used by the
        # translated-field machinery (see TranslatedField above).
        return cls._meta.get_field('default_locale')

    def add_addon(self, addon):
        """Add an add-on to this collection (no-op when already present)."""
        CollectionAddon.objects.get_or_create(addon=addon, collection=self)

    def remove_addon(self, addon):
        """Remove an add-on from this collection."""
        CollectionAddon.objects.filter(addon=addon, collection=self).delete()

    def owned_by(self, user):
        """Whether `user` is this collection's author."""
        return user.id == self.author_id

    def is_public(self):
        """A collection is public exactly when it is listed."""
        return self.listed

    @staticmethod
    def transformer(collections):
        # Queryset transformer: fetch all authors in a single query instead
        # of one query per collection.
        if not collections:
            return
        author_ids = {c.author_id for c in collections}
        authors = {u.id: u for u in UserProfile.objects.filter(id__in=author_ids)}
        for c in collections:
            c.author = authors.get(c.author_id)

    @staticmethod
    def post_save(sender, instance, **kwargs):
        """Schedule a metadata refresh; skipped for raw (fixture) saves."""
        from . import tasks

        if kwargs.get('raw'):
            return
        tasks.collection_meta.delay(instance.id)

    def check_ownership(
        self, request, require_owner, require_author, ignore_disabled, admin
    ):
        """
        Used by acl.check_ownership to see if request.user has permissions for
        the collection.
        """
        from olympia.access import acl

        return acl.check_collection_ownership(request, self, require_owner)
# --- Example 23 ---
class File(OnChangeMixin, ModelBase):
    """A single uploaded add-on file (.xpi when signed, .zip otherwise)
    belonging to exactly one Version."""

    id = PositiveAutoField(primary_key=True)
    STATUS_CHOICES = amo.STATUS_CHOICES_FILE

    version = models.OneToOneField('versions.Version',
                                   on_delete=models.CASCADE)
    filename = models.CharField(max_length=255, default='')
    size = models.PositiveIntegerField(default=0)  # In bytes.
    # 'sha256:<hexdigest>' of the file as currently stored - see
    # generate_hash().
    hash = models.CharField(max_length=255, default='')
    # The original hash of the file, before we sign it, or repackage it in
    # any other way.
    original_hash = models.CharField(max_length=255, default='')
    status = models.PositiveSmallIntegerField(
        choices=STATUS_CHOICES.items(), default=amo.STATUS_AWAITING_REVIEW)
    # auto_now_add: set on row creation; NOTE(review): presumably also
    # updated elsewhere when the status changes - not visible here.
    datestatuschanged = models.DateTimeField(null=True, auto_now_add=True)
    strict_compatibility = models.BooleanField(default=False)
    reviewed = models.DateTimeField(null=True, blank=True)
    # Serial number of the certificate use for the signature.
    cert_serial_num = models.TextField(blank=True)
    # Is the file signed by Mozilla?
    is_signed = models.BooleanField(default=False)
    # Is the file an experiment (see bug 1220097)?
    is_experiment = models.BooleanField(default=False)
    # Is the file a special "Mozilla Signed Extension"
    # see https://wiki.mozilla.org/Add-ons/InternalSigning
    is_mozilla_signed_extension = models.BooleanField(default=False)
    # The user has disabled this file and this was its status.
    # STATUS_NULL means the user didn't disable the File - i.e. Mozilla did.
    original_status = models.PositiveSmallIntegerField(default=amo.STATUS_NULL)
    # The manifest_version defined in manifest.json
    manifest_version = models.SmallIntegerField(
        default=DEFAULT_MANIFEST_VERSION)

    class Meta(ModelBase.Meta):
        db_table = 'files'
        indexes = [
            models.Index(fields=('created', 'version'), name='created_idx'),
            models.Index(fields=('datestatuschanged', 'version'),
                         name='statuschanged_idx'),
            models.Index(fields=('status', ), name='status'),
        ]

    def __str__(self):
        """Represent the file by its primary key."""
        return f'{self.id}'

    @property
    def has_been_validated(self):
        """True when a FileValidation row exists for this file."""
        try:
            self.validation
        except FileValidation.DoesNotExist:
            return False
        return True

    def get_url_path(self, attachment=False):
        """Download URL for this file, always including the filename.

        Requests may omit the filename, but responses must include it:
        Fenix intercepts downloads with a regex and expects the filename
        (ending in `.xpi`) in the URL path - anything after the path, such
        as a query string, is fine.
        See https://github.com/mozilla-mobile/fenix/blob/
        07d43971c0767fc023996dc32eb73e3e37c6517a/app/src/main/java/org/mozilla/fenix/
        AppRequestInterceptor.kt#L173
        """
        url_kwargs = {'file_id': self.pk, 'filename': self.filename}
        if attachment:
            url_kwargs['download_type'] = 'attachment'
        return reverse('downloads.file', kwargs=url_kwargs)

    @classmethod
    def from_upload(cls, upload, version, parsed_data=None):
        """
        Create a File instance from a FileUpload, a Version and the parsed_data
        generated by parse_addon().

        Note that it's the caller's responsibility to ensure the file is valid.
        We can't check for that here because an admin may have overridden the
        validation results."""
        assert parsed_data is not None

        file_ = cls(version=version)
        upload_path = force_str(nfd_str(upload.path))
        # Size in bytes.
        file_.size = storage.size(upload_path)
        file_.strict_compatibility = parsed_data.get('strict_compatibility',
                                                     False)
        file_.is_experiment = parsed_data.get('is_experiment', False)
        file_.is_mozilla_signed_extension = parsed_data.get(
            'is_mozilla_signed_extension', False)
        # A Mozilla-signed extension arrives already signed.
        file_.is_signed = file_.is_mozilla_signed_extension
        file_.filename = file_.generate_filename()

        file_.hash = file_.generate_hash(upload_path)
        # Keep the pre-signing hash around; `hash` may change later if the
        # file gets signed or repackaged.
        file_.original_hash = file_.hash
        file_.manifest_version = parsed_data.get('manifest_version',
                                                 DEFAULT_MANIFEST_VERSION)
        # Save before creating the related permission/validation rows below.
        file_.save()

        permissions = list(parsed_data.get('permissions', []))
        optional_permissions = list(parsed_data.get('optional_permissions',
                                                    []))

        # devtools_page isn't in permissions block but treated as one
        # if a custom devtools page is added by an addon
        if 'devtools_page' in parsed_data:
            permissions.append('devtools')

        # Add content_scripts host matches too.
        for script in parsed_data.get('content_scripts', []):
            permissions.extend(script.get('matches', []))
        if permissions or optional_permissions:
            WebextPermission.objects.create(
                permissions=permissions,
                optional_permissions=optional_permissions,
                file=file_,
            )
        # site_permissions are not related to webext permissions (they are
        # Web APIs a particular site can enable with a specially generated
        # add-on) and therefore are stored separately.
        if parsed_data.get('type') == amo.ADDON_SITE_PERMISSION:
            site_permissions = list(parsed_data.get('site_permissions', []))
            FileSitePermission.objects.create(
                permissions=site_permissions,
                file=file_,
            )

        log.info(f'New file: {file_!r} from {upload!r}')

        # Move the uploaded file from the temp location.
        storage.copy_stored_file(upload_path, file_.current_file_path)

        if upload.validation:
            validation = json.loads(upload.validation)
            FileValidation.from_json(file_, validation)

        return file_

    def generate_hash(self, filename=None):
        """Return a 'sha256:<hexdigest>' hash of the given (or current)
        file."""
        path = filename or self.current_file_path
        with open(path, 'rb') as fobj:
            return 'sha256:{}'.format(get_sha256(fobj))

    def generate_filename(self):
        """
        Files are in the format of:
        {addon_name}-{version}-{apps}
        (-{platform} for some of the old ones from back when we had multiple
         platforms)

        By convention, newly signed files after 2022-03-31 get a .xpi
        extension, unsigned get .zip. This helps ensure CDN cache is busted
        when we sign something.
        """
        addon = self.version.addon
        # slugify drops unicode so we may end up with an empty string.
        # Apache did not like serving unicode filenames (bug 626587).
        slug = slugify(addon.name).replace('-', '_') or 'addon'
        parts = [slug, self.version.version]

        if addon.type not in amo.NO_COMPAT and self.version.compatible_apps:
            parts.append('+'.join(sorted(
                app.shortername for app in self.version.compatible_apps)))

        extension = '.xpi' if self.is_signed else '.zip'
        return '-'.join(parts) + extension

    # NOTE(review): the character class used to be `[a-z0-7_]`, which
    # silently excluded the digits 8 and 9 from the slug match; slugified
    # names (see generate_filename) may contain any digit, so match 0-9.
    _pretty_filename = re.compile(r'(?P<slug>[a-z0-9_]+)(?P<suffix>.*)')

    def pretty_filename(self, maxlen=20):
        """Displayable filename.

        Truncates filename so that the slug part fits maxlen.
        """
        m = self._pretty_filename.match(self.filename)
        if not m:
            return self.filename
        if len(m.group('slug')) < maxlen:
            return self.filename
        return '{}...{}'.format(
            m.group('slug')[0:(maxlen - 3)], m.group('suffix'))

    def latest_xpi_url(self, attachment=False):
        """URL that always serves the add-on's latest file."""
        addon = self.version.addon
        url_kwargs = {
            'addon_id': addon.slug,
            'filename': f'addon-{addon.pk}-latest{self.extension}',
        }
        if attachment:
            url_kwargs['download_type'] = 'attachment'
        return reverse('downloads.latest', kwargs=url_kwargs)

    @property
    def file_path(self):
        """Path of the file under the public add-ons media root."""
        root = user_media_path('addons')
        return os.path.join(root, str(self.version.addon_id), self.filename)

    @property
    def addon(self):
        # Convenience shortcut: a file belongs to exactly one version,
        # which belongs to one add-on.
        return self.version.addon

    @property
    def guarded_file_path(self):
        """Path of the file under the guarded (non-public) media root."""
        root = user_media_path('guarded_addons')
        return os.path.join(root, str(self.version.addon_id), self.filename)

    @property
    def current_file_path(self):
        """Returns the current path of the file, whether or not it is
        guarded."""
        file_disabled = self.status == amo.STATUS_DISABLED
        addon_disabled = self.addon.is_disabled
        return (self.guarded_file_path
                if file_disabled or addon_disabled
                else self.file_path)

    @property
    def fallback_file_path(self):
        """Fallback path in case the file was disabled/re-enabled and not yet
        moved - sort of the opposite to current_file_path. This should only be
        used for things like code search or git extraction where we really want
        the file contents no matter what."""
        if self.current_file_path == self.guarded_file_path:
            return self.file_path
        return self.guarded_file_path

    @property
    def extension(self):
        """The filename's extension, including the leading dot."""
        _root, ext = os.path.splitext(self.filename)
        return ext

    def move_file(self, source_path, destination_path, log_message):
        """Move a file from `source_path` to `destination_path` and delete the
        source directory if it's empty once the file has been successfully
        moved.

        Meant to move files from/to the guarded file path as they are disabled
        or re-enabled.

        OSError (which includes IOError) and UnicodeEncodeError are caught
        and logged rather than propagated."""
        log_message = force_str(log_message)
        try:
            # A missing source is silently ignored - moving is best-effort.
            if storage.exists(source_path):
                source_parent_path = os.path.dirname(source_path)
                log.info(
                    log_message.format(source=source_path,
                                       destination=destination_path))
                storage.move_stored_file(source_path, destination_path)
                # Now that the file has been deleted, remove the directory if
                # it exists to prevent the main directory from growing too
                # much (#11464)
                remaining_dirs, remaining_files = storage.listdir(
                    source_parent_path)
                if len(remaining_dirs) == len(remaining_files) == 0:
                    storage.delete(source_parent_path)
        except (UnicodeEncodeError, OSError):
            msg = f'Move Failure: {source_path} {destination_path}'
            log.exception(msg)

    def hide_disabled_file(self):
        """Move a file from the public path to the guarded file path."""
        if not self.filename:
            return
        self.move_file(
            self.file_path, self.guarded_file_path,
            'Moving disabled file: {source} => {destination}')

    def unhide_disabled_file(self):
        """Move a file from guarded file path to the public file path."""
        if not self.filename:
            return
        self.move_file(
            self.guarded_file_path, self.file_path,
            'Moving undisabled file: {source} => {destination}')

    @cached_property
    def permissions(self):
        """Manifest permissions as an order-preserving, deduplicated list.

        Errant non-string entries in the manifest JSON are dropped. Returns
        [] when no WebextPermission row exists for this file.
        """
        try:
            # dict.fromkeys() removes duplicates while keeping first-seen
            # order - equivalent to, but clearer than, the previous
            # `seen-set inside a comprehension` trick, which also shadowed
            # the name `permissions` for both the set and the result.
            return list(dict.fromkeys(
                p for p in self._webext_permissions.permissions
                if isinstance(p, str)))
        except WebextPermission.DoesNotExist:
            return []

    @cached_property
    def optional_permissions(self):
        """Optional manifest permissions as an order-preserving, deduplicated
        list.

        Errant non-string entries in the manifest JSON are dropped. Returns
        [] when no WebextPermission row exists for this file.
        """
        try:
            # dict.fromkeys() removes duplicates while keeping first-seen
            # order - equivalent to, but clearer than, the previous
            # `seen-set inside a comprehension` trick, which also shadowed
            # the name `permissions` for both the set and the result.
            return list(dict.fromkeys(
                p for p in self._webext_permissions.optional_permissions
                if isinstance(p, str)))
        except WebextPermission.DoesNotExist:
            return []