Ejemplo n.º 1
0
class FileValidation(ModelBase):
    """Stores the results of running the validator against a File."""
    id = PositiveAutoField(primary_key=True)
    file = models.OneToOneField(File,
                                related_name='validation',
                                on_delete=models.CASCADE)
    valid = models.BooleanField(default=False)
    errors = models.IntegerField(default=0)
    warnings = models.IntegerField(default=0)
    notices = models.IntegerField(default=0)
    # Raw validator output, stored as a JSON string.
    validation = models.TextField()

    class Meta:
        db_table = 'file_validation'

    @classmethod
    def from_json(cls, file, validation):
        """Create a FileValidation row from raw validator output.

        `validation` may be a JSON string or an already-decoded dict.
        Also flags the related file as binary when the validator
        metadata says so, and replaces any previously stored results.
        """
        if isinstance(validation, str):
            validation = json.loads(validation)

        if 'metadata' in validation:
            metadata = validation['metadata']
            if (metadata.get('contains_binary_extension') or
                    metadata.get('contains_binary_content')):
                file.update(binary=True)

            if metadata.get('binary_components'):
                file.update(binary_components=True)

        # Delete any past results. We most often wind up with duplicate
        # results when multiple requests for the same validation data are
        # POSTed at the same time, which we currently do not have the
        # ability to track.
        cls.objects.filter(file=file).delete()

        fields = {
            'file': file,
            'validation': json.dumps(validation),
            'errors': validation['errors'],
            'warnings': validation['warnings'],
            'notices': validation['notices'],
            'valid': validation['errors'] == 0,
        }
        return cls.objects.create(**fields)

    @property
    def processed_validation(self):
        """Return processed validation results as expected by the frontend."""
        # Imported here to avoid an import loop.
        from olympia.devhub.utils import process_validation
        results = json.loads(self.validation)
        return process_validation(
            results,
            file_hash=self.file.original_hash,
            channel=self.file.version.channel)
Ejemplo n.º 2
0
class EmailUserRestriction(
        GetErrorMessageMixin, NormalizeEmailMixin, ModelBase):
    """Blocks add-on submission from email addresses matching a pattern."""
    id = PositiveAutoField(primary_key=True)
    email_pattern = models.CharField(
        _('Email Pattern'),
        max_length=100,
        help_text=_(
            'Either enter full domain or email that should be blocked or use '
            ' glob-style wildcards to match other patterns.'
            ' E.g "@*.mail.com"\n'
            ' Please note that we do not include "@" in the match so you '
            ' should do that in the pattern.'))

    error_message = _('The email address used for your account is not '
                      'allowed for add-on submission.')

    class Meta:
        db_table = 'users_user_email_restriction'

    def __str__(self):
        return str(self.email_pattern)

    @classmethod
    def allow_request(cls, request):
        """
        Return whether the specified request should be allowed to submit
        add-ons.
        """
        if not request.user.is_authenticated:
            return False

        normalized = cls.normalize_email(request.user.email)
        return cls.allow_email(normalized)

    @classmethod
    def allow_email(cls, email):
        """
        Return whether the specified email should be allowed to submit add-ons.
        """
        for restriction in EmailUserRestriction.objects.all():
            pattern = restriction.email_pattern
            if not fnmatchcase(email, pattern):
                continue
            log.info('Restricting request from %s %s (%s)',
                     'email', email,
                     'email_pattern=%s' % pattern)
            return False

        return True
Ejemplo n.º 3
0
class UpdateCount(StatsSearchMixin, models.Model):
    """Daily add-on update-ping counts, with per-version/status/app/os/locale
    breakdowns stored as JSON."""
    id = PositiveAutoField(primary_key=True)
    # Has an index `addon_id` in our dev, stage and prod database
    # Fix: on_delete is mandatory on ForeignKey since Django 2.0; CASCADE
    # matches the implicit pre-2.0 default (and the other definition of this
    # model in the codebase), so behavior is unchanged.
    addon = models.ForeignKey('addons.Addon', on_delete=models.CASCADE)
    # Has an index named `count` in our dev, stage and prod database
    count = models.PositiveIntegerField(db_index=True)
    # Has an index named `date` in our dev, stage and prod database
    date = models.DateField(db_index=True)
    versions = JSONField(db_column='version', null=True)
    statuses = JSONField(db_column='status', null=True)
    applications = JSONField(db_column='application', null=True)
    oses = JSONField(db_column='os', null=True)
    locales = JSONField(db_column='locale', null=True)

    class Meta:
        db_table = 'update_counts'
Ejemplo n.º 4
0
class AddonTag(ModelBase):
    """Through model for the Addon <-> Tag many-to-many relation
    (declared via `through='AddonTag'` on Tag.addons)."""
    id = PositiveAutoField(primary_key=True)
    addon = models.ForeignKey(
        'addons.Addon', related_name='addon_tags', on_delete=models.CASCADE
    )
    tag = models.ForeignKey(Tag, related_name='addon_tags', on_delete=models.CASCADE)

    class Meta:
        # Index/constraint names mirror the legacy MySQL schema.
        db_table = 'users_tags_addons'
        indexes = [
            models.Index(fields=('tag',), name='tag_id'),
            models.Index(fields=('addon',), name='addon_id'),
        ]
        constraints = [
            # Each (tag, addon) pair may exist only once.
            models.UniqueConstraint(fields=('tag', 'addon'), name='tag_id_2'),
        ]
Ejemplo n.º 5
0
class CollectionAddon(ModelBase):
    """Through model linking an Addon to a Collection, with per-item
    ordering and an optional comment."""
    id = PositiveAutoField(primary_key=True)
    addon = models.ForeignKey(Addon, on_delete=models.CASCADE)
    collection = models.ForeignKey(Collection, on_delete=models.CASCADE)
    # category (deprecated: for "Fashion Your Firefox")
    comments = LinkifiedField(null=True)
    # Presumably the user who added the add-on to the collection — confirm
    # against callers.
    user = models.ForeignKey(UserProfile, null=True, on_delete=models.CASCADE)

    ordering = models.PositiveIntegerField(
        default=0,
        help_text='Add-ons are displayed in ascending order '
        'based on this field.')

    class Meta(ModelBase.Meta):
        db_table = 'addons_collections'
        indexes = [
            models.Index(fields=('collection', 'created'), name='created_idx'),
            models.Index(fields=('addon', ), name='addon_id'),
            models.Index(fields=('collection', ), name='collection_id'),
            models.Index(fields=('user', ), name='user_id'),
        ]
        constraints = [
            models.UniqueConstraint(fields=('addon', 'collection'),
                                    name='addon_id_2'),
        ]

    @staticmethod
    def post_save(sender, instance, **kwargs):
        """Update Collection.addon_count and reindex add-on if the collection
        is featured."""
        # Skip fixture loading (raw=True) to avoid side effects.
        if kwargs.get('raw'):
            return
        if instance.collection.listed:
            activity.log_create(amo.LOG.ADD_TO_COLLECTION, instance.addon,
                                instance.collection)
        # NOTE(review): the addon_count update presumably happens inside
        # Collection.post_save — confirm there.
        Collection.post_save(sender, instance.collection, **kwargs)

    @staticmethod
    def post_delete(sender, instance, **kwargs):
        """Log removal from a listed collection and refresh collection
        metadata asynchronously."""
        # Imported here, presumably to avoid a circular import.
        from . import tasks

        if kwargs.get('raw'):
            return
        if instance.collection.listed:
            activity.log_create(amo.LOG.REMOVE_FROM_COLLECTION, instance.addon,
                                instance.collection)
        tasks.collection_meta.delay(instance.collection.id)
Ejemplo n.º 6
0
class RereviewQueueTheme(ModelBase):
    """A re-uploaded Persona (theme) awaiting another review pass."""
    id = PositiveAutoField(primary_key=True)
    theme = models.ForeignKey(Persona, on_delete=models.CASCADE)
    # Re-uploaded header image filename; empty means "reuse theme.header".
    header = models.CharField(max_length=72, blank=True, default='')

    # Holds whether this reuploaded theme is a duplicate.
    dupe_persona = models.ForeignKey(Persona,
                                     null=True,
                                     related_name='dupepersona',
                                     on_delete=models.CASCADE)

    # The order of those managers is very important: please read the lengthy
    # comment above the Addon managers declaration/instantiation.
    unfiltered = RereviewQueueThemeManager(include_deleted=True)
    objects = RereviewQueueThemeManager()

    class Meta:
        db_table = 'rereview_queue_theme'
        # This is very important: please read the lengthy comment in Addon.Meta
        # description
        base_manager_name = 'unfiltered'

    def __str__(self):
        return str(self.id)

    @property
    def header_path(self):
        """Return the path to the header image."""
        return self.theme._image_path(self.header or self.theme.header)

    @property
    def footer_path(self):
        """Return the path to the optional footer image."""
        # NOTE(review): no `footer` field is declared on this model here, so
        # `self.footer` looks like it would raise AttributeError — confirm
        # whether the field was removed or is defined elsewhere.
        footer = self.footer or self.theme.footer
        return footer and self.theme._image_path(footer) or ''

    @property
    def header_url(self):
        """Return the url of the header imager."""
        return self.theme._image_url(self.header or self.theme.header)

    @property
    def footer_url(self):
        """Return the url of the optional footer image."""
        # NOTE(review): same concern as footer_path — `self.footer` has no
        # backing field in this definition.
        footer = self.footer or self.theme.footer
        return footer and self.theme._image_url(footer) or ''
Ejemplo n.º 7
0
class RssKey(models.Model):
    """Secret key granting access to a private RSS feed, tied to either an
    add-on or a user (the `hubrsskeys` table name suggests devhub feeds)."""
    id = PositiveAutoField(primary_key=True)
    key = models.UUIDField(db_column='rsskey',
                           unique=True,
                           null=True,
                           default=uuid.uuid4)
    # Both FKs are nullable and individually unique; presumably exactly one
    # of `addon` / `user` is set per row — TODO confirm against callers.
    addon = models.ForeignKey(Addon,
                              null=True,
                              unique=True,
                              on_delete=models.CASCADE)
    user = models.ForeignKey(UserProfile,
                             null=True,
                             unique=True,
                             on_delete=models.CASCADE)
    # datetime.now is passed as a callable, so it is evaluated per-row at
    # creation time, not once at class definition time.
    created = models.DateField(default=datetime.now)

    class Meta:
        db_table = 'hubrsskeys'
Ejemplo n.º 8
0
class FeaturedCollection(ModelBase):
    """Marks a Collection as featured for an application and, optionally,
    a specific locale."""
    id = PositiveAutoField(primary_key=True)
    application = models.PositiveIntegerField(choices=amo.APPS_CHOICES,
                                              db_column='application_id')
    # Fix: on_delete is mandatory on ForeignKey since Django 2.0; CASCADE
    # matches the implicit pre-2.0 default, so behavior is unchanged.
    collection = models.ForeignKey(Collection, on_delete=models.CASCADE)
    # Locale the feature applies to; null presumably means all locales —
    # TODO confirm.
    locale = models.CharField(max_length=10, null=True)

    class Meta:
        db_table = 'featured_collections'

    def __str__(self):
        return u'%s (%s: %s)' % (self.collection, self.application,
                                 self.locale)

    @staticmethod
    def post_save_or_delete(sender, instance, **kwargs):
        """Signal handler: keep the collection's featured status in sync."""
        Collection.update_featured_status(FeaturedCollection,
                                          instance.collection, **kwargs)
Ejemplo n.º 9
0
class DraftComment(ModelBase):
    """A model that allows us to draft comments for reviews before we have
    an ActivityLog instance ready.

    This is being used by the commenting API by the code-manager.
    """
    id = PositiveAutoField(primary_key=True)
    version = models.ForeignKey(Version, on_delete=models.CASCADE)
    user = models.ForeignKey(UserProfile, on_delete=models.CASCADE)
    # File/line the comment is anchored to; both nullable, presumably for
    # comments on the version as a whole — confirm against the API.
    filename = models.CharField(max_length=255, null=True, blank=True)
    lineno = models.PositiveIntegerField(null=True)
    # SET_DEFAULT with default=None: deleting a CannedResponse detaches it
    # from drafts instead of deleting them.
    canned_response = models.ForeignKey(
        CannedResponse, null=True, default=None,
        on_delete=models.SET_DEFAULT)
    comment = models.TextField(blank=True)

    class Meta:
        db_table = 'log_activity_comment_draft'
Ejemplo n.º 10
0
class GroupUser(models.Model):
    """Through table linking a UserProfile to a Group."""
    id = PositiveAutoField(primary_key=True)
    group = models.ForeignKey(Group, on_delete=models.CASCADE)
    user = models.ForeignKey('users.UserProfile', on_delete=models.CASCADE)

    class Meta:
        db_table = u'groups_users'

    def invalidate_groups_list(self):
        """Callback to invalidate user.groups_list when creating/deleting GroupUser
        instances for this user."""
        try:
            # groups_list is a @cached_property, delete it to force it to be
            # refreshed (ignore AttributeError, that just means it has not been
            # accessed yet).
            del self.user.groups_list
        except AttributeError:
            pass
Ejemplo n.º 11
0
class IPNetworkUserRestriction(ModelBase):
    """Blocks add-on submission from IP addresses inside blocked networks."""
    id = PositiveAutoField(primary_key=True)
    # Fix: help text said "IPv6 or IPv6"; the field accepts both families.
    network = CIDRField(
        blank=True,
        null=True,
        help_text=_(
            'Enter a valid IPv4 or IPv6 CIDR network range, eg. 127.0.0.1/28'))

    error_message = _('Multiple add-ons violating our policies have been'
                      ' submitted from your location. The IP address has been'
                      ' blocked.')

    class Meta:
        db_table = 'users_user_network_restriction'

    def __str__(self):
        return str(self.network)

    @classmethod
    def allow_request(cls, request):
        """
        Return whether the specified request should be allowed to submit
        add-ons.
        """
        # Fix: initialize to None. Previously, when `request.user` was falsy
        # the name was left unbound and the membership test below raised
        # NameError instead of evaluating the restriction.
        user_last_login_ip = None
        try:
            remote_addr = ipaddress.ip_address(request.META.get('REMOTE_ADDR'))
            if request.user:
                user_last_login_ip = ipaddress.ip_address(
                    request.user.last_login_ip)
        except ValueError:
            # If we don't have a valid ip address, let's deny
            return False

        restrictions = IPNetworkUserRestriction.objects.all()

        for restriction in restrictions:
            if (remote_addr in restriction.network or
                    (user_last_login_ip is not None and
                     user_last_login_ip in restriction.network)):
                log.info('Restricting request from %s %s, %s %s (%s)', 'ip',
                         remote_addr, 'last_login_ip', user_last_login_ip,
                         'network=%s' % restriction.network)
                return False

        return True
Ejemplo n.º 12
0
class CannedResponse(ModelBase):
    """A named, pre-written response text, grouped and typed for selection
    (presumably in reviewer tooling — confirm against callers)."""
    id = PositiveAutoField(primary_key=True)
    name = models.CharField(max_length=255)
    response = models.TextField()
    # Grouping key used to cluster responses in selection UIs, presumably.
    sort_group = models.CharField(max_length=255)
    type = models.PositiveIntegerField(
        choices=amo.CANNED_RESPONSE_TYPE_CHOICES.items(), db_index=True,
        default=0)

    # Category is used only by code-manager
    category = models.PositiveIntegerField(
        choices=amo.CANNED_RESPONSE_CATEGORY_CHOICES.items(),
        default=amo.CANNED_RESPONSE_CATEGORY_OTHER)

    class Meta:
        db_table = 'cannedresponses'

    def __str__(self):
        return str(self.name)
Ejemplo n.º 13
0
class Tag(ModelBase):
    """A free-form text label attachable to add-ons via AddonTag."""
    id = PositiveAutoField(primary_key=True)
    tag_text = models.CharField(max_length=128)
    addons = models.ManyToManyField(
        'addons.Addon', through='AddonTag', related_name='tags'
    )
    # Denormalized count of add-ons carrying this tag (see update_stat()).
    num_addons = models.IntegerField(default=0)

    class Meta:
        db_table = 'tags'
        ordering = ('tag_text',)
        constraints = [
            models.UniqueConstraint(fields=('tag_text',), name='tag_text'),
        ]

    def __str__(self):
        return self.tag_text

    @property
    def popularity(self):
        """Denormalized add-on count, used as a popularity measure."""
        return self.num_addons

    def can_reverse(self):
        """Return whether a detail URL can be built for this tag."""
        try:
            self.get_url_path()
        except NoReverseMatch:
            return False
        return True

    def get_url_path(self):
        return reverse('tags.detail', args=[self.tag_text])

    def add_tag(self, addon):
        """Attach this tag to `addon` (idempotent) and log the action."""
        AddonTag.objects.get_or_create(addon=addon, tag=self)
        activity.log_create(amo.LOG.ADD_TAG, self, addon)

    def remove_tag(self, addon):
        """Detach this tag from `addon` and log the action."""
        links = AddonTag.objects.filter(addon=addon, tag=self)
        for link in links:
            link.delete()
        activity.log_create(amo.LOG.REMOVE_TAG, self, addon)

    def update_stat(self):
        """Refresh the denormalized `num_addons` counter and save."""
        self.num_addons = self.addons.count()
        self.save()
Ejemplo n.º 14
0
class DownloadCount(StatsSearchMixin, models.Model):
    """Daily per-add-on download counts, with a per-source JSON breakdown."""
    id = PositiveAutoField(primary_key=True)
    # has an index `addon_id` on this column...
    # Fix: on_delete is mandatory on ForeignKey since Django 2.0; CASCADE
    # matches the implicit pre-2.0 default (and the other definition of this
    # model in the codebase), so behavior is unchanged.
    addon = models.ForeignKey('addons.Addon', on_delete=models.CASCADE)

    # has an index named `count` in dev, stage and prod
    count = models.PositiveIntegerField(db_index=True)
    date = models.DateField()
    sources = JSONField(db_column='src', null=True)

    class Meta:
        db_table = 'download_counts'

        # additional indices on this table (in dev, stage and prod):
        # * KEY `addon_and_count` (`addon_id`,`count`)
        # * KEY `addon_date_idx` (`addon_id`,`date`)

        # in our (dev, stage and prod) database:
        # UNIQUE KEY `date_2` (`date`,`addon_id`)
        unique_together = ('date', 'addon')
Ejemplo n.º 15
0
class DownloadCount(StatsSearchMixin, models.Model):
    """Daily per-add-on download counts, with a per-source JSON breakdown.

    Index/constraint names mirror the legacy MySQL schema.
    """
    id = PositiveAutoField(primary_key=True)
    addon = models.ForeignKey('addons.Addon', on_delete=models.CASCADE)

    count = models.PositiveIntegerField()
    date = models.DateField()
    sources = JSONField(db_column='src', null=True)

    class Meta:
        db_table = 'download_counts'
        indexes = [
            # FIXME: some of these might redundant. See #5712
            models.Index(fields=('count', ), name='count'),
            models.Index(fields=('addon', ), name='addon_id'),
            models.Index(fields=('addon', 'count'), name='addon_and_count'),
            models.Index(fields=('addon', 'date'), name='addon_date_idx')
        ]
        constraints = [
            # One row per add-on per day.
            models.UniqueConstraint(fields=['date', 'addon'], name='date_2'),
        ]
Ejemplo n.º 16
0
class DiscoveryModule(ModelBase):
    """
    Keeps the application, ordering, and locale metadata for a module.

    The modules are defined statically in modules.py and linked to a database
    row through the module's name.
    """
    id = PositiveAutoField(primary_key=True)
    app = models.PositiveIntegerField(choices=amo.APPS_CHOICES,
                                      db_column='app_id')
    module = models.CharField(max_length=255)
    ordering = models.IntegerField(null=True, blank=True)
    # Locales the module applies to; presumably a delimited list, with the
    # empty string meaning "all locales" — TODO confirm format.
    locales = models.CharField(max_length=255, blank=True, default='')

    class Meta:
        db_table = 'discovery_modules'
        unique_together = ('app', 'module')

    def __str__(self):
        # Fix: this was `__unicode__`, the Python 2 string hook, which is
        # never invoked on Python 3 — the rest of this codebase defines
        # `__str__`.
        return u'%s (%s)' % (self.module, self.get_app_display())
Ejemplo n.º 17
0
class ApplicationsVersions(models.Model):
    """Compatibility range (min/max AppVersion) of a Version for one
    application."""
    id = PositiveAutoField(primary_key=True)
    application = models.PositiveIntegerField(choices=amo.APPS_CHOICES,
                                              db_column='application_id')
    version = models.ForeignKey(
        Version, related_name='apps', on_delete=models.CASCADE)
    # Inclusive lower/upper bounds of the supported application versions.
    min = models.ForeignKey(
        AppVersion, db_column='min', related_name='min_set',
        on_delete=models.CASCADE)
    max = models.ForeignKey(
        AppVersion, db_column='max', related_name='max_set',
        on_delete=models.CASCADE)

    class Meta:
        db_table = 'applications_versions'
        constraints = [
            # One compatibility row per (application, version) pair.
            models.UniqueConstraint(fields=('application', 'version'),
                                    name='application_id'),
        ]

    def get_application_display(self):
        """Return the human-readable name of the targeted application."""
        return str(amo.APPS_ALL[self.application].pretty)

    def get_latest_application_version(self):
        """Return the highest known AppVersion for this application,
        skipping wildcard ('*') versions."""
        return (
            AppVersion.objects
            .filter(
                ~models.Q(version__contains='*'),
                application=self.application)
            .order_by('-version_int')
            .first())

    def __str__(self):
        # When the version is compatible by default, render an open-ended
        # range ("Firefox 57.0 and later") instead of "min - max".
        if (self.version.is_compatible_by_default and
                self.version.is_compatible_app(amo.APP_IDS[self.application])):
            return ugettext(u'{app} {min} and later').format(
                app=self.get_application_display(),
                min=self.min
            )
        return u'%s %s - %s' % (self.get_application_display(),
                                self.min, self.max)
Ejemplo n.º 18
0
class UpdateCount(StatsSearchMixin, models.Model):
    """Daily add-on update-ping counts, with per-version/status/app/os/locale
    breakdowns stored as JSON.

    Index names mirror the legacy MySQL schema.
    """
    id = PositiveAutoField(primary_key=True)
    addon = models.ForeignKey('addons.Addon', on_delete=models.CASCADE)
    count = models.PositiveIntegerField()
    date = models.DateField()
    versions = JSONField(db_column='version', null=True)
    statuses = JSONField(db_column='status', null=True)
    applications = JSONField(db_column='application', null=True)
    oses = JSONField(db_column='os', null=True)
    locales = JSONField(db_column='locale', null=True)

    class Meta:
        db_table = 'update_counts'
        indexes = [
            # FIXME: some of these might redundant. See #5712
            models.Index(fields=('count', ), name='count'),
            models.Index(fields=('addon', ), name='addon_id'),
            models.Index(fields=('date', ), name='date'),
            models.Index(fields=('addon', 'count'), name='addon_and_count'),
            models.Index(fields=('addon', 'date'), name='addon_date_idx')
        ]
Ejemplo n.º 19
0
class ActivityLogToken(ModelBase):
    """A limited-use token tying a user to a specific add-on version."""
    id = PositiveAutoField(primary_key=True)
    version = models.ForeignKey(
        Version, related_name='token', on_delete=models.CASCADE)
    user = models.ForeignKey(
        'users.UserProfile',
        related_name='activity_log_tokens',
        on_delete=models.CASCADE,
    )
    uuid = models.UUIDField(default=uuid.uuid4, unique=True)
    use_count = models.IntegerField(
        default=0, help_text='Stores the number of times the token has been used'
    )

    class Meta:
        db_table = 'log_activity_tokens'
        constraints = [
            models.UniqueConstraint(fields=('version', 'user'), name='version_id'),
        ]

    def is_expired(self):
        """Whether the token has reached its maximum number of uses."""
        return self.use_count >= MAX_TOKEN_USE_COUNT

    def is_valid(self):
        """A token is valid while it is unexpired and its version is still
        the latest one for the add-on in that channel."""
        if self.is_expired():
            return False
        latest = self.version.addon.find_latest_version(
            channel=self.version.channel, exclude=()
        )
        return self.version == latest

    def expire(self):
        """Force-expire the token by maxing out its use counter."""
        self.update(use_count=MAX_TOKEN_USE_COUNT)

    def increment_use(self):
        """Atomically bump the use counter in the database, then mirror the
        bump on this in-memory instance."""
        queryset = self.__class__.objects.filter(pk=self.pk)
        queryset.update(use_count=models.F('use_count') + 1)
        self.use_count += 1
Ejemplo n.º 20
0
class CollectionAddon(ModelBase):
    """Through model linking an Addon to a Collection, with per-item
    ordering and an optional comment."""
    id = PositiveAutoField(primary_key=True)
    # Fix: on_delete is mandatory on ForeignKey since Django 2.0; CASCADE
    # matches the implicit pre-2.0 default (and the other definition of this
    # model in the codebase), so behavior is unchanged.
    addon = models.ForeignKey(Addon, on_delete=models.CASCADE)
    collection = models.ForeignKey(Collection, on_delete=models.CASCADE)
    # category (deprecated: for "Fashion Your Firefox")
    comments = LinkifiedField(null=True)
    user = models.ForeignKey(UserProfile, null=True, on_delete=models.CASCADE)

    ordering = models.PositiveIntegerField(
        default=0,
        help_text='Add-ons are displayed in ascending order '
        'based on this field.')

    class Meta(ModelBase.Meta):
        db_table = 'addons_collections'
        unique_together = (('addon', 'collection'), )

    @staticmethod
    def post_save(sender, instance, **kwargs):
        """Update Collection.addon_count and reindex add-on if the collection
        is featured."""
        # Imported here, presumably to avoid a circular import.
        from . import tasks
        tasks.collection_meta.delay(instance.collection_id)

    @staticmethod
    def post_delete(sender, instance, **kwargs):
        """Re-run post_save bookkeeping, then refresh featured status for
        the removed add-on specifically."""
        CollectionAddon.post_save(sender, instance, **kwargs)
        if instance.collection.is_featured():
            # The helpers .add_addon() and .remove_addon() already call .save()
            # on the collection, triggering update_featured_status() among
            # other things. However, this only takes care of the add-ons
            # present in the collection at the time, we also need to make sure
            # to invalidate add-ons that have been removed.
            Collection.update_featured_status(sender,
                                              instance.collection,
                                              addons=[instance.addon.pk],
                                              **kwargs)
Ejemplo n.º 21
0
class License(ModelBase):
    """A license attached to versions; rows with a nonzero `builtin` mirror
    a static constant in LICENSES_BY_BUILTIN."""
    OTHER = 0

    id = PositiveAutoField(primary_key=True)
    name = TranslatedField()
    url = models.URLField(null=True)
    builtin = models.PositiveIntegerField(default=OTHER)
    text = LinkifiedField()
    on_form = models.BooleanField(
        default=False, help_text='Is this a license choice in the devhub?')

    objects = LicenseManager()

    class Meta:
        db_table = 'licenses'
        indexes = [models.Index(fields=('builtin', ), name='builtin_idx')]

    def __str__(self):
        # Prefer the canonical built-in license's name when one matches.
        return str((self._constant or self).name)

    @property
    def _constant(self):
        """The static built-in license constant backing this row, if any."""
        return LICENSES_BY_BUILTIN.get(self.builtin)

    @property
    def creative_commons(self):
        constant = self._constant
        return bool(constant and constant.creative_commons)

    @property
    def icons(self):
        constant = self._constant
        return (constant and constant.icons) or ''

    @property
    def slug(self):
        constant = self._constant
        return (constant and constant.slug) or None
Ejemplo n.º 22
0
class AppVersion(ModelBase):
    """A known version of an application (per APPS_CHOICES) that add-ons
    can declare compatibility with."""
    id = PositiveAutoField(primary_key=True)
    application = models.PositiveIntegerField(choices=APPS_CHOICES,
                                              db_column='application_id')
    version = models.CharField(max_length=255, default='')
    # Sortable integer form of `version`, kept in sync in save().
    version_int = models.BigIntegerField(editable=False)

    class Meta:
        db_table = 'appversions'
        ordering = ['-version_int']
        unique_together = ('application', 'version')

    def save(self, *args, **kw):
        # Derive the sortable integer representation on first save.
        if not self.version_int:
            self.version_int = compare.version_int(self.version)
        return super(AppVersion, self).save(*args, **kw)

    def __init__(self, *args, **kwargs):
        super(AppVersion, self).__init__(*args, **kwargs)
        # Add all the major, minor, ..., version attributes to the object.
        self.__dict__.update(compare.version_dict(self.version or ''))

    def __str__(self):
        # Fix: this was `__unicode__`, the Python 2 string hook, which is
        # never invoked on Python 3 — the rest of this codebase defines
        # `__str__`.
        return self.version
Ejemplo n.º 23
0
class Version(OnChangeMixin, ModelBase):
    """A single uploaded version of an add-on, with its files and metadata."""

    id = PositiveAutoField(primary_key=True)
    addon = models.ForeignKey('addons.Addon',
                              related_name='versions',
                              on_delete=models.CASCADE)
    # Optional; survives deletion of the License row (SET_NULL).
    license = models.ForeignKey('License',
                                null=True,
                                blank=True,
                                on_delete=models.SET_NULL)
    release_notes = PurifiedField(db_column='releasenotes', short=False)
    approval_notes = models.TextField(db_column='approvalnotes',
                                      default='',
                                      null=True,
                                      blank=True)
    version = VersionStringField(max_length=255, default='0.1')

    nomination = models.DateTimeField(null=True)
    reviewed = models.DateTimeField(null=True)

    # Soft-delete flag; see delete() below.
    deleted = models.BooleanField(default=False)

    # Optional source-code archive uploaded by the developer.
    source = models.FileField(upload_to=source_upload_path,
                              null=True,
                              blank=True,
                              max_length=255)

    channel = models.IntegerField(choices=amo.RELEASE_CHANNEL_CHOICES,
                                  default=amo.RELEASE_CHANNEL_LISTED)

    git_hash = models.CharField(max_length=40, blank=True)

    needs_human_review = models.BooleanField(default=False)

    # The order of those managers is very important: please read the lengthy
    # comment above the Addon managers declaration/instantiation.
    unfiltered = VersionManager(include_deleted=True)
    objects = VersionManager()

    # See UnfilteredVersionManagerForRelations() docstring for usage of this
    # special manager.
    unfiltered_for_relations = UnfilteredVersionManagerForRelations()

    class Meta(ModelBase.Meta):
        db_table = 'versions'
        # This is very important: please read the lengthy comment in Addon.Meta
        # description
        base_manager_name = 'unfiltered'
        ordering = ['-created', '-modified']
        indexes = [
            models.Index(fields=('addon', ), name='addon_id'),
            models.Index(fields=('license', ), name='license_id'),
        ]
        constraints = [
            models.UniqueConstraint(
                fields=('addon', 'version'),
                name='versions_addon_id_version_5a2e75b6_uniq',
            ),
        ]

    def __str__(self):
        # Escaped because the version string is user-supplied and may end up
        # rendered in HTML contexts.
        return markupsafe.escape(self.version)

    @classmethod
    def from_upload(cls,
                    upload,
                    addon,
                    selected_apps,
                    channel,
                    parsed_data=None):
        """
        Create a Version instance and corresponding File(s) from a
        FileUpload, an Addon, a list of compatible app ids, a channel id and
        the parsed_data generated by parse_addon().

        Note that it's the caller's responsibility to ensure the file is
        valid. We can't check for that here because an admin may have
        overridden the validation results.

        Raises VersionCreateError when the add-on is disabled or the upload
        is missing required data.
        """
        from olympia.addons.models import AddonReviewerFlags
        from olympia.addons.utils import RestrictionChecker
        from olympia.git.utils import create_git_extraction_entry

        assert parsed_data is not None

        if addon.status == amo.STATUS_DISABLED:
            raise VersionCreateError(
                'Addon is Mozilla Disabled; no new versions are allowed.')

        if upload.addon and upload.addon != addon:
            raise VersionCreateError(
                'FileUpload was made for a different Addon')

        if not upload.user or not upload.ip_address or not upload.source:
            raise VersionCreateError(
                'FileUpload does not have some required fields')

        if not upload.user.last_login_ip or not upload.user.email:
            raise VersionCreateError(
                'FileUpload user does not have some required fields')

        # Listed versions inherit the license of the latest previous version
        # in the same channel, when there is one.
        license_id = None
        if channel == amo.RELEASE_CHANNEL_LISTED:
            previous_version = addon.find_latest_version(channel=channel,
                                                         exclude=())
            if previous_version and previous_version.license_id:
                license_id = previous_version.license_id
        approval_notes = None
        if parsed_data.get('is_mozilla_signed_extension'):
            approval_notes = (
                'This version has been signed with Mozilla internal certificate.'
            )
        version = cls.objects.create(
            addon=addon,
            approval_notes=approval_notes,
            version=parsed_data['version'],
            license_id=license_id,
            channel=channel,
        )
        email = upload.user.email if upload.user and upload.user.email else ''
        with core.override_remote_addr(upload.ip_address):
            # The following log statement is used by foxsec-pipeline.
            # We override the IP because it might be called from a task and we
            # want the original IP from the submitter.
            log.info(
                f'New version: {version!r} ({version.id}) from {upload!r}',
                extra={
                    'email': email,
                    'guid': addon.guid,
                    'upload': upload.uuid.hex,
                    'user_id': upload.user_id,
                    'from_api': upload.source == amo.UPLOAD_SOURCE_API,
                },
            )
            activity.log_create(amo.LOG.ADD_VERSION,
                                version,
                                addon,
                                user=upload.user or get_task_user())

        if addon.type == amo.ADDON_STATICTHEME:
            # We don't let developers select apps for static themes
            selected_apps = [app.id for app in amo.APP_USAGE]

        compatible_apps = {}
        for app in parsed_data.get('apps', []):
            if app.id not in selected_apps:
                # If the user chose to explicitly deselect Firefox for Android
                # we're not creating the respective `ApplicationsVersions`
                # which will have this add-on then be listed only for
                # Firefox specifically.
                continue

            compatible_apps[app.appdata] = ApplicationsVersions(
                version=version, min=app.min, max=app.max, application=app.id)
            compatible_apps[app.appdata].save()

        # Pre-generate _compatible_apps property to avoid accidentally
        # triggering queries with that instance later.
        version._compatible_apps = compatible_apps

        # Create relevant file and update the all_files cached property on the
        # Version, because we might need it afterwards.
        version.all_files = [
            File.from_upload(
                upload=upload,
                version=version,
                parsed_data=parsed_data,
            )
        ]

        version.inherit_nomination(from_statuses=[amo.STATUS_AWAITING_REVIEW])
        version.disable_old_files()

        # After the upload has been copied to its permanent location, delete it
        # from storage. Keep the FileUpload instance (it gets cleaned up by a
        # cron eventually some time after its creation, in amo.cron.gc()),
        # making sure it's associated with the add-on instance.
        storage.delete(upload.path)
        upload.path = ''
        if upload.addon is None:
            upload.addon = addon
        upload.save()

        version_uploaded.send(instance=version, sender=Version)

        # Attach any scanner results produced during upload validation to the
        # new version, when scanning is enabled via waffle switches.
        if version.is_webextension:
            if (waffle.switch_is_active('enable-yara')
                    or waffle.switch_is_active('enable-customs')
                    or waffle.switch_is_active('enable-wat')):
                ScannerResult.objects.filter(upload_id=upload.id).update(
                    version=version)

        if waffle.switch_is_active('enable-uploads-commit-to-git-storage'):
            # Schedule this version for git extraction.
            transaction.on_commit(
                lambda: create_git_extraction_entry(version=version))

        # Generate a preview and icon for listed static themes
        if (addon.type == amo.ADDON_STATICTHEME
                and channel == amo.RELEASE_CHANNEL_LISTED):
            theme_data = parsed_data.get('theme', {})
            generate_static_theme_preview(theme_data, version.pk)

        # Reset add-on reviewer flags to disable auto-approval and require
        # admin code review if the package has already been signed by mozilla.
        reviewer_flags_defaults = {}
        is_mozilla_signed = parsed_data.get('is_mozilla_signed_extension')
        if upload.validation_timeout:
            reviewer_flags_defaults['needs_admin_code_review'] = True
        if is_mozilla_signed and addon.type != amo.ADDON_LPAPP:
            reviewer_flags_defaults['needs_admin_code_review'] = True
            reviewer_flags_defaults['auto_approval_disabled'] = True

        # Check if the approval should be restricted
        if not RestrictionChecker(upload=upload).is_auto_approval_allowed():
            flag = ('auto_approval_disabled'
                    if channel == amo.RELEASE_CHANNEL_LISTED else
                    'auto_approval_disabled_unlisted')
            reviewer_flags_defaults[flag] = True

        if reviewer_flags_defaults:
            AddonReviewerFlags.objects.update_or_create(
                addon=addon, defaults=reviewer_flags_defaults)

        # Authors need to be notified about auto-approval delay again since
        # they are submitting a new version.
        addon.reset_notified_about_auto_approval_delay()

        # Track the time it took from first upload through validation
        # (and whatever else) until a version was created.
        upload_start = utc_millesecs_from_epoch(upload.created)
        now = datetime.datetime.now()
        now_ts = utc_millesecs_from_epoch(now)
        upload_time = now_ts - upload_start

        log.info('Time for version {version} creation from upload: {delta}; '
                 'created={created}; now={now}'.format(delta=upload_time,
                                                       version=version,
                                                       created=upload.created,
                                                       now=now))
        statsd.timing('devhub.version_created_from_upload', upload_time)

        return version

    def license_url(self, impala=False):
        """Return the URL of this version's license page.

        NOTE(review): `impala` is accepted but never used — presumably a
        legacy flag kept for caller compatibility; confirm before removing.
        """
        return reverse('addons.license', args=[self.addon.slug, self.version])

    def get_url_path(self):
        """URL of the listing page for this version's add-on.

        Unlisted versions have no public listing page, so return ''.
        """
        if self.channel != amo.RELEASE_CHANNEL_UNLISTED:
            return reverse('addons.versions', args=[self.addon.slug])
        return ''

    def delete(self, hard=False):
        """Delete this version.

        By default this is a soft delete: files are disabled, `deleted` is
        set and the row is kept for history. Pass hard=True to actually
        remove the row.
        """
        # To avoid a circular import
        from .tasks import delete_preview_files

        log.info(f'Version deleted: {self!r} ({self.id})')
        activity.log_create(amo.LOG.DELETE_VERSION, self.addon,
                            str(self.version))

        if hard:
            super().delete()
        else:
            # By default we soft delete so we can keep the files for comparison
            # and a record of the version number.
            self.files.update(status=amo.STATUS_DISABLED)
            self.deleted = True
            self.save()

            # Clear pending rejection flag (we have the activity log for
            # records purposes, the flag serves no purpose anymore if the
            # version is deleted).
            VersionReviewerFlags.objects.filter(version=self).update(
                pending_rejection=None)

            # Previews are cleaned up asynchronously, one task per preview.
            previews_pks = list(
                VersionPreview.objects.filter(version__id=self.id).values_list(
                    'id', flat=True))

            for preview_pk in previews_pks:
                delete_preview_files.delay(preview_pk)

    @property
    def is_user_disabled(self):
        """True when at least one file was disabled by the developer — i.e.
        a disabled file that still remembers a real original status."""
        disabled = self.files.filter(status=amo.STATUS_DISABLED)
        return disabled.exclude(original_status=amo.STATUS_NULL).exists()

    @is_user_disabled.setter
    def is_user_disabled(self, disable):
        """Disable or re-enable this version's files on the user's behalf."""
        if not disable:
            # Re-enable: restore files the user disabled. Files disabled by
            # Mozilla keep original_status == STATUS_NULL and are skipped.
            activity.log_create(amo.LOG.ENABLE_VERSION, self.addon, self)
            restorable = self.files.exclude(original_status=amo.STATUS_NULL)
            for file in restorable.all():
                file.update(status=file.original_status,
                            original_status=amo.STATUS_NULL)
            return
        # Disable every file that isn't already disabled, remembering the
        # status it had so it can be restored later.
        activity.log_create(amo.LOG.DISABLE_VERSION, self.addon, self)
        for file in self.files.exclude(status=amo.STATUS_DISABLED).all():
            file.update(original_status=file.status,
                        status=amo.STATUS_DISABLED)

    @cached_property
    def all_activity(self):
        """All VersionLog entries for this version, oldest first."""
        # prefetch_related() and not select_related() the ActivityLog so that
        # its transformer is called.
        logs = self.versionlog_set.prefetch_related('activity_log')
        return logs.order_by('created')

    @property
    def compatible_apps(self):
        """Mapping of {APP: ApplicationsVersions} for this version."""
        addon = self.addon
        # Dicts and search providers carry no compatibility info; fake an
        # entry (value None) for every app the add-on type supports.
        if addon and addon.type in amo.NO_COMPAT:
            return dict.fromkeys(amo.APP_TYPE_SUPPORT[addon.type])
        # Otherwise use the cached property below, filled either by the
        # transformer or computed from the related compat rows.
        return self._compatible_apps

    @cached_property
    def _compatible_apps(self):
        """Get a mapping of {APP: ApplicationsVersions}."""
        compat_rows = self.apps.all().select_related('min', 'max')
        return self._compat_map(compat_rows)

    @cached_property
    def compatible_apps_ordered(self):
        """compatible_apps items sorted by the app's short name."""
        return sorted(self.compatible_apps.items(),
                      key=lambda item: item[0].short)

    @cached_property
    def is_compatible_by_default(self):
        """Returns whether or not the add-on is considered compatible by
        default.

        A version is compatible by default unless one of its files ships
        binary components or requests strict compatibility.
        """
        # Use self.all_files directly since that's cached and more potentially
        # prefetched through a transformer already.
        # Idiom fix: feed any() a generator expression instead of
        # materializing a throwaway list of file objects.
        return not any(file.binary_components or file.strict_compatibility
                       for file in self.all_files)

    def is_compatible_app(self, app):
        """Returns True if the provided app passes compatibility conditions."""
        # Types without compatibility info are always considered compatible.
        if self.addon.type in amo.NO_COMPAT:
            return True
        appversion = self.compatible_apps.get(app)
        if not appversion or app.id not in amo.D2C_MIN_VERSIONS:
            return False
        minimum = amo.D2C_MIN_VERSIONS.get(app.id, '*')
        return version_int(appversion.max.version) >= version_int(minimum)

    def compat_override_app_versions(self):
        """Returns the incompatible app versions range(s).

        If not ranges, returns empty list.  Otherwise, this will return all
        the app version ranges that this particular version is incompatible
        with.
        """
        overrides = list(self.addon.compatoverride_set.all())

        if not overrides:
            return []

        app_versions = []
        # Hoisted: this value is loop-invariant.
        this_version = version_int(self.version)
        for override in overrides:
            # Fix: the loop variable was named `range`, shadowing the builtin.
            for version_range in override.collapsed_ranges():
                if (version_int(version_range.min) <= this_version <=
                        version_int(version_range.max)):
                    app_versions.extend(
                        [(a.min, a.max) for a in version_range.apps])
        return app_versions

    @cached_property
    def all_files(self):
        """Shortcut for list(self.files.all()). Cached."""
        return list(self.files.all())

    @property
    def current_file(self):
        """Shortcut for selecting the first file from self.all_files

        NOTE(review): raises IndexError when the version has no files —
        callers presumably check has_files first; confirm.
        """
        return self.all_files[0]

    @property
    def status(self):
        """Human-readable status label for each of this version's files."""
        return [
            f.STATUS_CHOICES.get(f.status,
                                 gettext('[status:%s]') % f.status)
            for f in self.all_files
        ]

    @property
    def statuses(self):
        """Unadulterated statuses, good for an API."""
        return [(f.id, f.status) for f in self.all_files]

    def is_public(self):
        """True when this version is visible to the public.

        To be public, a version must not be deleted, must belong to a public
        addon, and all its attached files must have public status.
        """
        try:
            if self.deleted or not self.addon.is_public():
                return False
            return all(f.status == amo.STATUS_APPROVED
                       for f in self.all_files)
        except ObjectDoesNotExist:
            return False

    @property
    def is_webextension(self):
        """True when at least one attached file is a WebExtension."""
        for file_ in self.all_files:
            if file_.is_webextension:
                return True
        return False

    @property
    def is_mozilla_signed(self):
        """Is the file a special "Mozilla Signed Extension"

        See https://wiki.mozilla.org/Add-ons/InternalSigning for more details.
        We use that information to workaround compatibility limits for legacy
        add-ons and to avoid them receiving negative boosts compared to
        WebExtensions.

        See https://github.com/mozilla/addons-server/issues/6424
        """
        # Every file must be Mozilla-signed (vacuously True with no files).
        for file_ in self.all_files:
            if not file_.is_mozilla_signed_extension:
                return False
        return True

    @property
    def has_files(self):
        """True when at least one file is attached to this version."""
        return len(self.all_files) > 0

    @property
    def is_unreviewed(self):
        """True when at least one file is in an unreviewed status."""
        # Idiom fix: any() over a generator instead of
        # bool(list(filter(lambda ...))), which built a throwaway list.
        return any(f.status in amo.UNREVIEWED_FILE_STATUSES
                   for f in self.all_files)

    @property
    def is_all_unreviewed(self):
        """True when every file is in an unreviewed status (vacuously True
        when there are no files, matching the previous implementation)."""
        # Idiom fix: all() over a generator instead of negating a list build.
        return all(f.status in amo.UNREVIEWED_FILE_STATUSES
                   for f in self.all_files)

    @property
    def sources_provided(self):
        """True when the developer uploaded a source archive."""
        return bool(self.source)

    def _compat_map(self, avs):
        """Build {APP: ApplicationsVersions} from an iterable of compat rows,
        skipping rows whose application id is unknown.

        Also back-links each row's `version` to self so accessing it later
        doesn't trigger an extra query.
        """
        apps = {}
        for av in avs:
            av.version = self
            app_id = av.application
            if app_id in amo.APP_IDS:
                apps[amo.APP_IDS[app_id]] = av
        return apps

    @classmethod
    def transformer(cls, versions):
        """Attach all the compatible apps and files to the versions."""
        if not versions:
            return

        ids = {v.id for v in versions}
        avs = ApplicationsVersions.objects.filter(
            version__in=ids).select_related('min', 'max')
        files = File.objects.filter(version__in=ids)

        def rollup(xs):
            # Group rows by version_id into {version_id: [rows]}.
            groups = sorted_groupby(xs, 'version_id')
            return {k: list(vs) for k, vs in groups}

        av_dict, file_dict = rollup(avs), rollup(files)

        for version in versions:
            v_id = version.id
            version._compatible_apps = version._compat_map(
                av_dict.get(v_id, []))
            version.all_files = file_dict.get(v_id, [])
            for f in version.all_files:
                # Back-link so f.version doesn't trigger a query later.
                f.version = version

    @classmethod
    def transformer_promoted(cls, versions):
        """Attach the promoted approvals to the versions."""
        if not versions:
            return

        PromotedApproval = versions[0].promoted_approvals.model

        ids = {v.id for v in versions}

        approvals = list(
            PromotedApproval.objects.filter(version_id__in=ids).values_list(
                'version_id', 'group_id', 'application_id', named=True))

        approval_dict = {
            version_id: list(groups)
            for version_id, groups in sorted_groupby(approvals, 'version_id')
        }
        for version in versions:
            v_id = version.id
            # Resolve (group, app) pairs, dropping approvals whose group is
            # no longer a known promoted group.
            groups = [(
                PROMOTED_GROUPS_BY_ID.get(approval.group_id),
                APP_IDS.get(approval.application_id),
            ) for approval in approval_dict.get(v_id, [])
                      if approval.group_id in PROMOTED_GROUPS_BY_ID]
            version.approved_for_groups = groups

    @classmethod
    def transformer_activity(cls, versions):
        """Attach all the activity to the versions."""
        from olympia.activity.models import VersionLog

        # Consistency fix: guard before doing any work — the sibling
        # transformers bail out first; previously the ids set was built
        # even when `versions` was empty.
        if not versions:
            return

        ids = {v.id for v in versions}

        # Ideally, we'd start from the ActivityLog, but because VersionLog
        # to ActivityLog isn't a OneToOneField, we wouldn't be able to find
        # the version easily afterwards - we can't even do a
        # select_related('versionlog') and try to traverse the relation to find
        # the version. So, instead, start from VersionLog, but make sure to use
        # prefetch_related() (and not select_related() - yes, it's one extra
        # query, but it's worth it to benefit from the default transformer) so
        # that the ActivityLog default transformer is called.
        al = (VersionLog.objects.prefetch_related('activity_log').filter(
            version__in=ids).order_by('created'))

        def rollup(xs):
            # Group rows by version_id into {version_id: [rows]}.
            groups = sorted_groupby(xs, 'version_id')
            return {k: list(vs) for k, vs in groups}

        al_dict = rollup(al)

        for version in versions:
            version.all_activity = al_dict.get(version.id, [])

    @classmethod
    def transformer_license(cls, versions):
        """Attach all the licenses to the versions.

        Do not use if you need the license text: it's explicitly deferred in
        this transformer, because it should only be used when listing multiple
        versions, where returning license text is not supposed to be needed.

        The translations app doesn't fully handle evaluating a deferred field,
        so the callers need to make sure the license text will never be needed
        on instances returned by a queryset transformed by this method."""
        if not versions:
            return
        license_ids = {ver.license_id for ver in versions}
        licenses = License.objects.filter(id__in=license_ids).defer('text')
        license_dict = {lic.id: lic for lic in licenses}

        for version in versions:
            license = license_dict.get(version.license_id)
            if license:
                version.license = license

    @classmethod
    def transformer_auto_approvable(cls, versions):
        """Attach auto-approvability information to the versions."""
        ids = {v.id for v in versions}
        if not ids:
            return

        # One query for all versions, then a membership test per version.
        auto_approvable = (Version.objects.auto_approvable().filter(
            pk__in=ids).values_list('pk', flat=True))

        for version in versions:
            version.is_ready_for_auto_approval = version.pk in auto_approvable

    def disable_old_files(self):
        """
        Disable files from versions older than the current one in the same
        channel and awaiting review. Used when uploading a new version.

        Does nothing if the current instance is unlisted.
        """
        if self.channel == amo.RELEASE_CHANNEL_LISTED:
            # NOTE(review): `version__lt=self.id` compares on the version FK
            # (i.e. older rows by primary key) — confirm intent before
            # changing.
            qs = File.objects.filter(
                version__addon=self.addon_id,
                version__lt=self.id,
                version__deleted=False,
                version__channel=self.channel,
                status=amo.STATUS_AWAITING_REVIEW,
            )
            # Use File.update so signals are triggered.
            for f in qs:
                f.update(status=amo.STATUS_DISABLED)

    def reset_nomination_time(self, nomination=None):
        """Set the nomination date. With no argument, only fills it in when
        it isn't already set; an explicit date always overwrites."""
        if not self.nomination or nomination:
            nomination = nomination or datetime.datetime.now()
            # We need signal=False not to call update_status (which calls us).
            self.update(nomination=nomination, _signal=False)

    def inherit_nomination(self, from_statuses=None):
        """Copy the nomination date from the most recently nominated listed
        version (optionally restricted to versions whose files have one of
        `from_statuses`)."""
        last_ver = (Version.objects.filter(
            addon=self.addon, channel=amo.RELEASE_CHANNEL_LISTED).exclude(
                nomination=None).exclude(id=self.pk).order_by('-nomination'))
        if from_statuses:
            last_ver = last_ver.filter(files__status__in=from_statuses)
        if last_ver.exists():
            self.reset_nomination_time(nomination=last_ver[0].nomination)

    @property
    def unreviewed_files(self):
        """A File is unreviewed if its status is amo.STATUS_AWAITING_REVIEW."""
        return self.files.filter(status=amo.STATUS_AWAITING_REVIEW)

    @cached_property
    def is_ready_for_auto_approval(self):
        """Return whether or not this version could be *considered* for
        auto-approval.

        Does not necessarily mean that it would be auto-approved, just that it
        passes the most basic criteria to be considered a candidate by the
        auto_approve command."""
        return Version.objects.auto_approvable().filter(id=self.id).exists()

    @property
    def was_auto_approved(self):
        """Return whether or not this version was auto-approved."""
        from olympia.reviewers.models import AutoApprovalSummary

        try:
            # Only public versions with an AUTO_APPROVED verdict count.
            return (self.is_public() and AutoApprovalSummary.objects.filter(
                version=self).get().verdict == amo.AUTO_APPROVED)
        except AutoApprovalSummary.DoesNotExist:
            pass
        return False

    def get_background_images_encoded(self, header_only=False):
        """Return {name: base64-encoded image data} for this version's
        static-theme background images; {} when there are no files."""
        if not self.has_files:
            return {}
        file_obj = self.all_files[0]
        return {
            name: force_str(b64encode(background))
            for name, background in utils.get_background_images(
                file_obj, theme_data=None, header_only=header_only).items()
        }

    def can_be_disabled_and_deleted(self):
        """Whether this version may be disabled/deleted by the developer.

        Always True unless this is the current version of an add-on in a
        badged, pre-reviewed promoted group — in which case a previous valid
        version in the same channel must already hold the group's approval.
        """
        # see https://github.com/mozilla/addons-server/issues/15121#issuecomment-667226959  # noqa
        # "It should apply to the <groups> that require a review to be badged"
        from olympia.promoted.models import PromotedApproval

        if self != self.addon.current_version or (
                not (group := self.addon.promoted_group())
                or not (group.badged and group.pre_review)):
            return True

        # Only the single most recent valid version (other than this one, in
        # the same channel) is considered.
        previous_ver = (self.addon.versions.valid().filter(
            channel=self.channel).exclude(id=self.id).no_transforms()[:1])
        previous_approval = PromotedApproval.objects.filter(
            group_id=group.id, version__in=previous_ver)
        return previous_approval.exists()
# --- Ejemplo n.º 24 (scraped example separator, score 0; commented out so the file parses) ---
class CollectionAddon(ModelBase):
    """Through-model linking an Addon to a Collection."""

    id = PositiveAutoField(primary_key=True)
    addon = models.ForeignKey(Addon, on_delete=models.CASCADE)
    collection = models.ForeignKey(Collection, on_delete=models.CASCADE)
    # category (deprecated: for "Fashion Your Firefox")
    comments = LinkifiedField(null=True)
    user = models.ForeignKey(UserProfile, null=True, on_delete=models.CASCADE)

    ordering = models.PositiveIntegerField(
        default=0,
        help_text='Add-ons are displayed in ascending order based on this field.',
    )

    class Meta(ModelBase.Meta):
        db_table = 'addons_collections'
        indexes = [
            models.Index(
                fields=('collection', 'created'), name='addons_collections_created_idx'
            ),
            models.Index(fields=('addon',), name='addons_collections_addon_idx'),
            models.Index(fields=('collection',), name='collection_id'),
            models.Index(fields=('user',), name='addons_collections_user_id'),
        ]
        constraints = [
            models.UniqueConstraint(fields=('addon', 'collection'), name='addon_id_2'),
        ]

    @staticmethod
    def post_save(sender, instance, **kwargs):
        """Update Collection.addon_count and reindex add-on if the collection
        is featured."""
        from olympia.addons.tasks import index_addons

        # Skip raw saves (fixture loading).
        if kwargs.get('raw'):
            return
        if instance.collection.listed:
            activity.log_create(
                amo.LOG.ADD_TO_COLLECTION, instance.addon, instance.collection
            )
        kwargs['addons'] = [instance.addon]
        Collection.post_save(sender, instance.collection, **kwargs)
        if instance.collection.id == settings.COLLECTION_FEATURED_THEMES_ID:
            # That collection is special: each add-on in it is considered
            # recommended, so we need to index the corresponding add-on.
            # (Note: we are considering the add-on in a given CollectionAddon
            #  never changes, to change add-ons belonging to a collection we
            #  add or remove CollectionAddon instances, we never modify the
            #  addon foreignkey of an existing instance).
            index_addons.delay([instance.addon.id])

    @staticmethod
    def post_delete(sender, instance, **kwargs):
        """Update Collection.addon_count and reindex the removed add-on if
        the collection is featured."""

        from olympia.addons.tasks import index_addons

        # Skip raw deletes (fixture loading).
        if kwargs.get('raw'):
            return
        if instance.collection.listed:
            activity.log_create(
                amo.LOG.REMOVE_FROM_COLLECTION, instance.addon, instance.collection
            )
        kwargs['addons'] = [instance.addon]
        Collection.post_save(sender, instance.collection, **kwargs)
        if instance.collection.id == settings.COLLECTION_FEATURED_THEMES_ID:
            # That collection is special: each add-on in it is considered
            # recommended, so we need to index the add-on we just removed from
            # it.
            index_addons.delay([instance.addon.id])
# --- Ejemplo n.º 25 (scraped example separator, score 0; commented out so the file parses) ---
class ReviewerScore(ModelBase):
    """Points awarded to a reviewer for a review-related action.

    Rows are created by ``award_points()`` and ``award_moderation_points()``.
    Aggregate queries (totals, breakdowns, leaderboards) are cached under a
    shared cache namespace so that they can all be invalidated at once via
    ``get_key(invalidate=True)``.
    """
    id = PositiveAutoField(primary_key=True)
    user = models.ForeignKey(UserProfile, related_name='_reviewer_scores')
    addon = models.ForeignKey(Addon, blank=True, null=True, related_name='+')
    version = models.ForeignKey(Version,
                                blank=True,
                                null=True,
                                related_name='+')
    score = models.IntegerField()
    # For automated point rewards.
    note_key = models.SmallIntegerField(choices=amo.REVIEWED_CHOICES.items(),
                                        default=0)
    # For manual point rewards with a note.
    note = models.CharField(max_length=255)

    class Meta:
        db_table = 'reviewer_scores'
        ordering = ('-created', )

    @classmethod
    def get_key(cls, key=None, invalidate=False):
        """Return a namespaced cache key for `key`.

        With no `key`, only touch (or invalidate) the namespace itself and
        return None.
        """
        namespace = 'riscore'
        if not key:  # Assuming we're invalidating the namespace.
            cache_ns_key(namespace, invalidate)
            return
        else:
            # Using cache_ns_key so each cache val is invalidated together.
            ns_key = cache_ns_key(namespace, invalidate)
            return '%s:%s' % (ns_key, key)

    @classmethod
    def get_event(cls,
                  addon,
                  status,
                  version=None,
                  post_review=False,
                  content_review=False):
        """Return the review event type constant.

        This is determined by the addon.type and the queue the addon is
        currently in (which is determined from the various parameters sent
        down from award_points()).

        Note: We're not using addon.status or addon.current_version because
        this is called after the status/current_version might have been updated
        by the reviewer action.

        Returns None when no event applies.
        """
        reviewed_score_name = None
        if content_review:
            # Content review always gives the same amount of points.
            reviewed_score_name = 'REVIEWED_CONTENT_REVIEW'
        elif post_review:
            # There are 4 tiers of post-review scores depending on the addon
            # weight.
            try:
                if version is None:
                    raise AutoApprovalSummary.DoesNotExist
                weight = version.autoapprovalsummary.weight
            except AutoApprovalSummary.DoesNotExist as exception:
                log.exception(
                    'No such version/auto approval summary when determining '
                    'event type to award points: %r', exception)
                weight = 0

            if addon.type == amo.ADDON_DICT:
                reviewed_score_name = 'REVIEWED_DICT_FULL'
            elif addon.type in [amo.ADDON_LPAPP, amo.ADDON_LPADDON]:
                reviewed_score_name = 'REVIEWED_LP_FULL'
            elif addon.type == amo.ADDON_SEARCH:
                reviewed_score_name = 'REVIEWED_SEARCH_FULL'
            elif weight > amo.POST_REVIEW_WEIGHT_HIGHEST_RISK:
                reviewed_score_name = 'REVIEWED_EXTENSION_HIGHEST_RISK'
            elif weight > amo.POST_REVIEW_WEIGHT_HIGH_RISK:
                reviewed_score_name = 'REVIEWED_EXTENSION_HIGH_RISK'
            elif weight > amo.POST_REVIEW_WEIGHT_MEDIUM_RISK:
                reviewed_score_name = 'REVIEWED_EXTENSION_MEDIUM_RISK'
            else:
                reviewed_score_name = 'REVIEWED_EXTENSION_LOW_RISK'
        else:
            if status == amo.STATUS_NOMINATED:
                queue = 'FULL'
            elif status == amo.STATUS_PUBLIC:
                queue = 'UPDATE'
            else:
                queue = ''

            if (addon.type
                    in [amo.ADDON_EXTENSION, amo.ADDON_PLUGIN, amo.ADDON_API]
                    and queue):
                reviewed_score_name = 'REVIEWED_ADDON_%s' % queue
            elif addon.type == amo.ADDON_DICT and queue:
                reviewed_score_name = 'REVIEWED_DICT_%s' % queue
            elif addon.type in [amo.ADDON_LPAPP, amo.ADDON_LPADDON] and queue:
                reviewed_score_name = 'REVIEWED_LP_%s' % queue
            elif addon.type == amo.ADDON_PERSONA:
                reviewed_score_name = 'REVIEWED_PERSONA'
            elif addon.type == amo.ADDON_STATICTHEME:
                reviewed_score_name = 'REVIEWED_STATICTHEME'
            elif addon.type == amo.ADDON_SEARCH and queue:
                reviewed_score_name = 'REVIEWED_SEARCH_%s' % queue
            elif addon.type == amo.ADDON_THEME and queue:
                reviewed_score_name = 'REVIEWED_XUL_THEME_%s' % queue

        if reviewed_score_name:
            return getattr(amo, reviewed_score_name)
        return None

    @classmethod
    def award_points(cls,
                     user,
                     addon,
                     status,
                     version=None,
                     post_review=False,
                     content_review=False,
                     extra_note=''):
        """Awards points to user based on an event and the queue.

        `event` is one of the `REVIEWED_` keys in constants.
        `status` is one of the `STATUS_` keys in constants.
        `version` is the `Version` object that was affected by the review.
        `post_review` is set to True if the add-on was auto-approved and the
                      reviewer is confirming/rejecting post-approval.
        `content_review` is set to True if it's a content-only review of an
                         auto-approved add-on.

        Returns the score awarded, or None if no event applied.
        """

        # If a webextension file gets approved manually (e.g. because
        # auto-approval is disabled), 'post-review' is set to False, treating
        # the file as a legacy file which is not what we want. The file is
        # still a webextension and should treated as such, regardless of
        # auto-approval being disabled or not.
        # As a hack, we set 'post_review' to True.
        if (version and version.is_webextension
                and addon.type in amo.GROUP_TYPE_ADDON):
            post_review = True

        user_log.info(
            (u'Determining award points for user %s for version %s of addon %s'
             % (user, version, addon.id)).encode('utf-8'))

        event = cls.get_event(addon,
                              status,
                              version=version,
                              post_review=post_review,
                              content_review=content_review)
        score = amo.REVIEWED_SCORES.get(event)

        user_log.info(
            (u'Determined %s award points (event: %s) for user %s for version '
             u'%s of addon %s' %
             (score, event, user, version, addon.id)).encode('utf-8'))

        # Add bonus to reviews greater than our limit to encourage fixing
        # old reviews. Does not apply to content-review/post-review at the
        # moment, because it would need to be calculated differently.
        # Also require a base score: if the event was unknown, `score` is
        # None and adding a bonus would raise a TypeError.
        award_overdue_bonus = (score is not None
                               and version and version.nomination
                               and not post_review and not content_review)
        if award_overdue_bonus:
            waiting_time_days = (datetime.now() - version.nomination).days
            days_over = waiting_time_days - amo.REVIEWED_OVERDUE_LIMIT
            if days_over > 0:
                bonus = days_over * amo.REVIEWED_OVERDUE_BONUS
                score = score + bonus

        if score is not None:
            cls.objects.create(user=user,
                               addon=addon,
                               score=score,
                               note_key=event,
                               note=extra_note,
                               version=version)
            cls.get_key(invalidate=True)
            user_log.info(
                (u'Awarding %s points to user %s for "%s" for addon %s' %
                 (score, user, amo.REVIEWED_CHOICES[event],
                  addon.id)).encode('utf-8'))
        return score

    @classmethod
    def award_moderation_points(cls, user, addon, review_id, undo=False):
        """Awards points to user based on moderated review."""
        event = (amo.REVIEWED_ADDON_REVIEW
                 if not undo else amo.REVIEWED_ADDON_REVIEW_POORLY)
        score = amo.REVIEWED_SCORES.get(event)

        cls.objects.create(user=user, addon=addon, score=score, note_key=event)
        cls.get_key(invalidate=True)
        user_log.info(u'Awarding %s points to user %s for "%s" for review %s' %
                      (score, user, amo.REVIEWED_CHOICES[event], review_id))

    @classmethod
    def get_total(cls, user):
        """Returns total points by user."""
        key = cls.get_key('get_total:%s' % user.id)
        val = cache.get(key)
        if val is not None:
            return val

        # aggregate() returns {'total': None} when the user has no scores;
        # index by key instead of subscripting dict.values(), which isn't
        # valid on Python 3 and relied on dict ordering.
        val = ReviewerScore.objects.filter(user=user).aggregate(
            total=Sum('score'))['total']
        if val is None:
            val = 0

        cache.set(key, val, None)
        return val

    @classmethod
    def get_recent(cls, user, limit=5, addon_type=None):
        """Returns most recent ReviewerScore records."""
        # Include addon_type in the cache key so calls with different
        # filters don't share a cache entry.
        key = cls.get_key('get_recent:%s:%s' % (user.id, addon_type))
        val = cache.get(key)
        if val is not None:
            return val

        val = ReviewerScore.objects.filter(user=user)
        if addon_type is not None:
            # QuerySet.filter() returns a new queryset: rebind it, otherwise
            # the filter is silently dropped.
            val = val.filter(addon__type=addon_type)

        val = list(val[:limit])
        cache.set(key, val, None)
        return val

    @classmethod
    def get_breakdown(cls, user):
        """Returns points broken down by addon type."""
        key = cls.get_key('get_breakdown:%s' % user.id)
        val = cache.get(key)
        if val is not None:
            return val

        sql = """
             SELECT `reviewer_scores`.*,
                    SUM(`reviewer_scores`.`score`) AS `total`,
                    `addons`.`addontype_id` AS `atype`
             FROM `reviewer_scores`
             LEFT JOIN `addons` ON (`reviewer_scores`.`addon_id`=`addons`.`id`)
             WHERE `reviewer_scores`.`user_id` = %s
             GROUP BY `addons`.`addontype_id`
             ORDER BY `total` DESC
        """
        val = list(ReviewerScore.objects.raw(sql, [user.id]))
        cache.set(key, val, None)
        return val

    @classmethod
    def get_breakdown_since(cls, user, since):
        """
        Returns points broken down by addon type since the given datetime.
        """
        key = cls.get_key('get_breakdown:%s:%s' % (user.id, since.isoformat()))
        val = cache.get(key)
        if val is not None:
            return val

        sql = """
             SELECT `reviewer_scores`.*,
                    SUM(`reviewer_scores`.`score`) AS `total`,
                    `addons`.`addontype_id` AS `atype`
             FROM `reviewer_scores`
             LEFT JOIN `addons` ON (`reviewer_scores`.`addon_id`=`addons`.`id`)
             WHERE `reviewer_scores`.`user_id` = %s AND
                   `reviewer_scores`.`created` >= %s
             GROUP BY `addons`.`addontype_id`
             ORDER BY `total` DESC
        """
        val = list(ReviewerScore.objects.raw(sql, [user.id, since]))
        cache.set(key, val, 3600)
        return val

    @classmethod
    def _leaderboard_list(cls, since=None, types=None, addon_type=None):
        """
        Returns base leaderboard list. Each item will be a tuple containing
        (user_id, name, total).
        """

        reviewers = (UserProfile.objects.filter(
            groups__name__startswith='Reviewers: ').exclude(
                groups__name__in=('Staff', 'Admins',
                                  'No Reviewer Incentives')).distinct())
        qs = (cls.objects.values_list('user__id').filter(
            user__in=reviewers).annotate(
                total=Sum('score')).order_by('-total'))

        if since is not None:
            qs = qs.filter(created__gte=since)

        if types is not None:
            qs = qs.filter(note_key__in=types)

        if addon_type is not None:
            qs = qs.filter(addon__type=addon_type)

        users = {reviewer.pk: reviewer for reviewer in reviewers}
        return [(item[0], users.get(item[0], UserProfile()).name, item[1])
                for item in qs]

    @classmethod
    def get_leaderboards(cls, user, days=7, types=None, addon_type=None):
        """Returns leaderboards with ranking for the past given days.

        This will return a dict of 3 items::

            {'leader_top': [...],
             'leader_near: [...],
             'user_rank': (int)}

        If the user is not in the leaderboard, or if the user is in the top 5,
        'leader_near' will be an empty list and 'leader_top' will contain 5
        elements instead of the normal 3.

        """
        key = cls.get_key('get_leaderboards:%s' % user.id)
        val = cache.get(key)
        if val is not None:
            return val

        week_ago = date.today() - timedelta(days=days)

        leader_top = []
        leader_near = []

        leaderboard = cls._leaderboard_list(since=week_ago,
                                            types=types,
                                            addon_type=addon_type)

        scores = []

        user_rank = 0
        in_leaderboard = False
        for rank, row in enumerate(leaderboard, 1):
            user_id, name, total = row
            scores.append({
                'user_id': user_id,
                'name': name,
                'rank': rank,
                'total': int(total),
            })
            if user_id == user.id:
                user_rank = rank
                in_leaderboard = True

        if not in_leaderboard:
            leader_top = scores[:5]
        else:
            if user_rank <= 5:  # User is in top 5, show top 5.
                leader_top = scores[:5]
            else:
                leader_top = scores[:3]
                leader_near = [scores[user_rank - 2], scores[user_rank - 1]]
                try:
                    leader_near.append(scores[user_rank])
                except IndexError:
                    pass  # User is last on the leaderboard.

        val = {
            'leader_top': leader_top,
            'leader_near': leader_near,
            'user_rank': user_rank,
        }
        cache.set(key, val, None)
        return val

    @classmethod
    def all_users_by_score(cls):
        """
        Returns reviewers ordered by highest total points first.
        """
        leaderboard = cls._leaderboard_list()
        scores = []

        for row in leaderboard:
            user_id, name, total = row
            user_level = len(amo.REVIEWED_LEVELS) - 1
            for i, level in enumerate(amo.REVIEWED_LEVELS):
                if total < level['points']:
                    user_level = i - 1
                    break

            # Only show level if it changes.
            if user_level < 0:
                level = ''
            else:
                # six.text_type instead of the Python 2-only `unicode`
                # builtin, consistent with the rest of this file.
                level = six.text_type(
                    amo.REVIEWED_LEVELS[user_level]['name'])

            scores.append({
                'user_id': user_id,
                'name': name,
                'total': int(total),
                'level': level,
            })

        prev = None
        for score in reversed(scores):
            if score['level'] == prev:
                score['level'] = ''
            else:
                prev = score['level']

        return scores
# Ejemplo n.º 26
# 0
class File(OnChangeMixin, ModelBase):
    """A single uploaded file belonging to a Version of an add-on."""
    id = PositiveAutoField(primary_key=True)
    STATUS_CHOICES = amo.STATUS_CHOICES_FILE

    version = models.ForeignKey('versions.Version',
                                related_name='files',
                                on_delete=models.CASCADE)
    platform = models.PositiveIntegerField(
        choices=amo.SUPPORTED_PLATFORMS_CHOICES,
        default=amo.PLATFORM_ALL.id,
        db_column="platform_id")
    filename = models.CharField(max_length=255, default='')
    size = models.PositiveIntegerField(default=0)  # In bytes.
    hash = models.CharField(max_length=255, default='')
    # The original hash of the file, before we sign it, or repackage it in
    # any other way.
    original_hash = models.CharField(max_length=255, default='')
    status = models.PositiveSmallIntegerField(
        choices=STATUS_CHOICES.items(), default=amo.STATUS_AWAITING_REVIEW)
    datestatuschanged = models.DateTimeField(null=True, auto_now_add=True)
    is_restart_required = models.BooleanField(default=False)
    strict_compatibility = models.BooleanField(default=False)
    reviewed = models.DateTimeField(null=True, blank=True)
    # The `binary` field is used to store the flags from amo-validator when it
    # finds files with binary extensions or files that may contain binary
    # content.
    binary = models.BooleanField(default=False)
    # The `binary_components` field is used to store the flag from
    # amo-validator when it finds "binary-components" in the chrome manifest
    # file, used for default to compatible.
    binary_components = models.BooleanField(default=False, db_index=True)
    # Serial number of the certificate use for the signature.
    cert_serial_num = models.TextField(blank=True)
    # Is the file signed by Mozilla?
    is_signed = models.BooleanField(default=False)
    # Is the file an experiment (see bug 1220097)?
    is_experiment = models.BooleanField(default=False)
    # Is the file a WebExtension?
    is_webextension = models.BooleanField(default=False)
    # Is the file a special "Mozilla Signed Extension"
    # see https://wiki.mozilla.org/Add-ons/InternalSigning
    is_mozilla_signed_extension = models.BooleanField(default=False)
    # The user has disabled this file and this was its status.
    # STATUS_NULL means the user didn't disable the File - i.e. Mozilla did.
    original_status = models.PositiveSmallIntegerField(default=amo.STATUS_NULL)

    class Meta(ModelBase.Meta):
        db_table = 'files'

    def __str__(self):
        return six.text_type(self.id)

    def get_platform_display(self):
        """Return the human-readable platform name."""
        return force_text(amo.PLATFORMS[self.platform].name)

    @property
    def has_been_validated(self):
        """True if a FileValidation row exists for this file."""
        try:
            self.validation
        except FileValidation.DoesNotExist:
            return False
        else:
            return True

    @property
    def automated_signing(self):
        """True if this file is eligible for automated signing, i.e. its
        version is in the unlisted channel."""
        return self.version.channel == amo.RELEASE_CHANNEL_UNLISTED

    def get_file_cdn_url(self, attachment=False):
        """Return the URL for the file corresponding to this instance
        on the CDN."""
        if attachment:
            host = posixpath.join(user_media_url('addons'), '_attachments')
        else:
            host = user_media_url('addons')

        return posixpath.join(
            *map(force_bytes, [host, self.version.addon.id, self.filename]))

    def get_url_path(self, src, attachment=False):
        """Return the absolute download URL for this file."""
        return self._make_download_url('downloads.file',
                                       src,
                                       attachment=attachment)

    def _make_download_url(self, view_name, src, attachment=False):
        """Build an absolute download URL for `view_name` with the `src`
        tracking parameter appended."""
        kwargs = {'file_id': self.pk}
        if attachment:
            kwargs['type'] = 'attachment'
        url = os.path.join(reverse(view_name, kwargs=kwargs), self.filename)
        return absolutify(urlparams(url, src=src))

    @classmethod
    def from_upload(cls, upload, version, platform, parsed_data=None):
        """
        Create a File instance from a FileUpload, a Version, a platform id
        and the parsed_data generated by parse_addon().

        Note that it's the caller's responsability to ensure the file is valid.
        We can't check for that here because an admin may have overridden the
        validation results."""
        assert parsed_data is not None

        file_ = cls(version=version, platform=platform)
        upload_path = force_text(nfd_str(upload.path))
        ext = force_text(os.path.splitext(upload_path)[1])
        if ext == '.jar':
            ext = '.xpi'
        file_.filename = file_.generate_filename(extension=ext or '.xpi')
        # Size in bytes.
        file_.size = storage.size(upload_path)
        file_.is_restart_required = parsed_data.get('is_restart_required',
                                                    False)
        file_.strict_compatibility = parsed_data.get('strict_compatibility',
                                                     False)
        file_.is_experiment = parsed_data.get('is_experiment', False)
        file_.is_webextension = parsed_data.get('is_webextension', False)
        file_.is_mozilla_signed_extension = parsed_data.get(
            'is_mozilla_signed_extension', False)

        file_.hash = file_.generate_hash(upload_path)
        file_.original_hash = file_.hash
        file_.save()

        if file_.is_webextension:
            permissions = list(parsed_data.get('permissions', []))
            # Add content_scripts host matches too.
            for script in parsed_data.get('content_scripts', []):
                permissions.extend(script.get('matches', []))
            if permissions:
                WebextPermission.objects.create(permissions=permissions,
                                                file=file_)

        log.debug('New file: %r from %r' % (file_, upload))

        # Move the uploaded file from the temp location.
        copy_stored_file(upload_path, file_.current_file_path)

        if upload.validation:
            validation = json.loads(upload.validation)
            FileValidation.from_json(file_, validation)

        return file_

    def generate_hash(self, filename=None):
        """Generate a sha256 hash for a file, prefixed with 'sha256:'."""
        with open(filename or self.current_file_path, 'rb') as fobj:
            return 'sha256:{}'.format(get_sha256(fobj))

    def generate_filename(self, extension=None):
        """
        Files are in the format of:
        {addon_name}-{version}-{apps}-{platform}
        """
        parts = []
        addon = self.version.addon
        # slugify drops unicode so we may end up with an empty string.
        # Apache did not like serving unicode filenames (bug 626587).
        extension = extension or '.xpi'
        name = slugify(addon.name).replace('-', '_') or 'addon'
        parts.append(name)
        parts.append(self.version.version)

        if addon.type not in amo.NO_COMPAT and self.version.compatible_apps:
            apps = '+'.join(
                sorted([a.shortername for a in self.version.compatible_apps]))
            parts.append(apps)

        if self.platform and self.platform != amo.PLATFORM_ALL.id:
            parts.append(amo.PLATFORMS[self.platform].shortname)

        self.filename = '-'.join(parts) + extension
        return self.filename

    # Slug chars come from slugify() with '-' replaced by '_', so digits
    # 8 and 9 are legitimate; the previous `0-7` range looked like a typo.
    _pretty_filename = re.compile(r'(?P<slug>[a-z0-9_]+)(?P<suffix>.*)')

    def pretty_filename(self, maxlen=20):
        """Displayable filename.

        Truncates filename so that the slug part fits maxlen.
        """
        m = self._pretty_filename.match(self.filename)
        if not m:
            return self.filename
        if len(m.group('slug')) < maxlen:
            return self.filename
        return u'%s...%s' % (m.group('slug')[0:(maxlen - 3)],
                             m.group('suffix'))

    def latest_xpi_url(self, attachment=False):
        """Return the 'downloads.latest' URL for this file's add-on."""
        addon = self.version.addon
        kw = {'addon_id': addon.slug}
        if self.platform != amo.PLATFORM_ALL.id:
            kw['platform'] = self.platform
        if attachment:
            kw['type'] = 'attachment'
        return os.path.join(reverse('downloads.latest', kwargs=kw),
                            'addon-%s-latest%s' % (addon.pk, self.extension))

    @property
    def file_path(self):
        """Path of the file in the regular (public) addons media storage."""
        return os.path.join(user_media_path('addons'),
                            str(self.version.addon_id), self.filename)

    @property
    def addon(self):
        """Shortcut to the add-on this file's version belongs to."""
        return self.version.addon

    @property
    def guarded_file_path(self):
        """Path of the file in the guarded (disabled) addons storage."""
        return os.path.join(user_media_path('guarded_addons'),
                            str(self.version.addon_id), self.filename)

    @property
    def current_file_path(self):
        """Returns the current path of the file, whether or not it is
        guarded."""

        file_disabled = self.status == amo.STATUS_DISABLED
        addon_disabled = self.addon.is_disabled
        if file_disabled or addon_disabled:
            return self.guarded_file_path
        else:
            return self.file_path

    @property
    def extension(self):
        """The filename's extension, including the leading dot."""
        return os.path.splitext(self.filename)[-1]

    def move_file(self, source, destination, log_message):
        """Move a file from `source` to `destination`.

        Failures (encoding issues, IO errors) are logged, not raised, since
        moves are best-effort housekeeping."""
        log_message = force_text(log_message)
        try:
            if storage.exists(source):
                log.info(
                    log_message.format(source=source, destination=destination))
                move_stored_file(source, destination)
        except (UnicodeEncodeError, IOError):
            msg = u'Move Failure: {} {}'.format(source, destination)
            log.exception(msg)

    def hide_disabled_file(self):
        """Move a disabled file to the guarded file path."""
        if not self.filename:
            return
        src, dst = self.file_path, self.guarded_file_path
        self.move_file(src, dst,
                       'Moving disabled file: {source} => {destination}')

    def unhide_disabled_file(self):
        """Move a re-enabled file back to the public file path."""
        if not self.filename:
            return
        src, dst = self.guarded_file_path, self.file_path
        self.move_file(src, dst,
                       'Moving undisabled file: {source} => {destination}')

    _get_localepicker = re.compile(r'^locale browser ([\w\-_]+) (.*)$', re.M)

    @memoize(prefix='localepicker', timeout=None)
    def get_localepicker(self):
        """
        For a file that is part of a language pack, extract
        the chrome/localepicker.properties file and return as
        a string. Returns '' on any extraction problem.
        """
        start = time.time()

        try:
            zip_ = SafeZip(self.file_path)
        except (zipfile.BadZipfile, IOError):
            return ''

        try:
            manifest = force_text(zip_.read('chrome.manifest'))
        except KeyError:
            log.info('No file named: chrome.manifest in file: %s' % self.pk)
            return ''

        res = self._get_localepicker.search(manifest)
        if not res:
            log.error('Locale browser not in chrome.manifest: %s' % self.pk)
            return ''

        try:
            path = res.groups()[1]
            if 'localepicker.properties' not in path:
                path = os.path.join(path, 'localepicker.properties')
            res = zip_.extract_from_manifest(path)
        except (zipfile.BadZipfile, IOError) as e:
            log.error('Error unzipping: %s, %s in file: %s' %
                      (path, e, self.pk))
            return ''
        except (ValueError, KeyError) as e:
            log.error('No file named: %s in file: %s' % (e, self.pk))
            return ''

        end = time.time() - start
        log.info('Extracted localepicker file: %s in %.2fs' % (self.pk, end))
        statsd.timing('files.extract.localepicker', (end * 1000))
        return force_text(res)

    @cached_property
    def webext_permissions_list(self):
        """Deduplicated, string-only list of webextension permissions."""
        if not self.is_webextension:
            return []
        try:
            # Filter out any errant non-strings included in the manifest JSON.
            # Remove any duplicate permissions (the `seen`-set trick keeps
            # the first occurrence and preserves order).
            permissions = set()
            permissions = [
                p for p in self._webext_permissions.permissions
                if isinstance(p, six.string_types)
                and not (p in permissions or permissions.add(p))
            ]
            return permissions

        except WebextPermission.DoesNotExist:
            return []
# Ejemplo n.º 27
# 0
class Collection(ModelBase):
    """A user-curated, ordered collection of add-ons.

    Rows live in the legacy `collections` table; add-on membership is
    stored through the `CollectionAddon` model (`addons_collections`
    table), which also carries per-add-on ordering and comments.
    """
    id = PositiveAutoField(primary_key=True)
    # (type_code, label) pairs for the `type` field below.
    TYPE_CHOICES = amo.COLLECTION_CHOICES.items()

    # Public identifier; generated automatically in save() when missing.
    uuid = models.UUIDField(blank=True, unique=True, null=True)
    name = TranslatedField(require_locale=False)
    # nickname is deprecated.  Use slug.
    nickname = models.CharField(max_length=30,
                                blank=True,
                                unique=True,
                                null=True)
    # Unique together with `author` (see Meta); normalized in clean_slug().
    slug = models.CharField(max_length=30, blank=True, null=True)

    description = NoLinksNoMarkupField(require_locale=False)
    default_locale = models.CharField(max_length=10,
                                      default='en-US',
                                      db_column='defaultlocale')
    type = models.PositiveIntegerField(db_column='collection_type',
                                       choices=TYPE_CHOICES,
                                       default=0)

    listed = models.BooleanField(
        default=True, help_text='Collections are either listed or private.')

    application = models.PositiveIntegerField(choices=amo.APPS_CHOICES,
                                              db_column='application_id',
                                              blank=True,
                                              null=True,
                                              db_index=True)
    # Denormalized add-on count, refreshed by the collection_meta task
    # queued from post_save() below.
    addon_count = models.PositiveIntegerField(default=0,
                                              db_column='addonCount')

    all_personas = models.BooleanField(
        default=False, help_text='Does this collection only contain Themes?')

    addons = models.ManyToManyField(Addon,
                                    through='CollectionAddon',
                                    related_name='collections')
    author = models.ForeignKey(UserProfile,
                               null=True,
                               related_name='collections')

    objects = CollectionManager()

    class Meta(ModelBase.Meta):
        db_table = 'collections'
        unique_together = (('author', 'slug'), )

    def __str__(self):
        return u'%s (%s)' % (self.name, self.addon_count)

    def save(self, **kw):
        """Fill in uuid/slug defaults and de-duplicate the slug, then save."""
        if not self.uuid:
            self.uuid = uuid.uuid4()
        if not self.slug:
            # Work with both, strings (if passed manually on .create()
            # and UUID instances)
            self.slug = str(self.uuid).replace('-', '')[:30]
        self.clean_slug()

        super(Collection, self).save(**kw)

    def clean_slug(self):
        """Normalize the slug.

        Special collection types always get their fixed reserved slug.
        Other collections must not use a reserved slug and must not clash
        with another collection by the same author; clashes get a numeric
        suffix appended.
        """
        if self.type in SPECIAL_SLUGS:
            self.slug = SPECIAL_SLUGS[self.type]
            return

        if self.slug in SPECIAL_SLUGS.values():
            self.slug += '~'

        if not self.author:
            return

        # Read from the default (writer) db to avoid replica lag.
        qs = self.author.collections.using('default')
        slugs = dict((slug, id) for slug, id in qs.values_list('slug', 'id'))
        if self.slug in slugs and slugs[self.slug] != self.id:
            # Try slug-1, slug-2, ...  With len(slugs) candidates and at
            # most len(slugs) - 1 possible clashes, one is always free.
            for idx in range(len(slugs)):
                new = '%s-%s' % (self.slug, idx + 1)
                if new not in slugs:
                    self.slug = new
                    return

    def get_url_path(self):
        return reverse('collections.detail', args=[self.author_id, self.slug])

    def get_abs_url(self):
        return absolutify(self.get_url_path())

    def edit_url(self):
        return reverse('collections.edit', args=[self.author_id, self.slug])

    def delete_url(self):
        return reverse('collections.delete', args=[self.author_id, self.slug])

    def share_url(self):
        return reverse('collections.share', args=[self.author_id, self.slug])

    def stats_url(self):
        return reverse('collections.stats', args=[self.author_id, self.slug])

    @classmethod
    def get_fallback(cls):
        # Fallback locale field, presumably consumed by the translations
        # machinery behind TranslatedField -- confirm against its callers.
        return cls._meta.get_field('default_locale')

    def set_addons(self, addon_ids, comments=None):
        """Replace the current add-ons with a new list of add-on ids."""
        if comments is None:
            comments = {}
        # Desired ordering: position of each id in the incoming list.
        order = {a: idx for idx, a in enumerate(addon_ids)}

        # Partition addon_ids into add/update/remove buckets.
        existing = set(
            self.addons.using('default').values_list('id', flat=True))
        add, update = [], []
        for addon in addon_ids:
            bucket = update if addon in existing else add
            bucket.append((addon, order[addon]))
        remove = existing.difference(addon_ids)
        now = datetime.now()

        # Bulk delete/insert via raw SQL.  NOTE(review): values are
        # interpolated directly into the statements rather than passed as
        # query parameters; this is only safe while the ids are integers
        # coming from the ORM -- confirm callers never pass untrusted
        # strings.
        with connection.cursor() as cursor:
            if remove:
                cursor.execute("DELETE FROM addons_collections "
                               "WHERE collection_id=%s AND addon_id IN (%s)" %
                               (self.id, ','.join(map(str, remove))))
                if self.listed:
                    for addon in remove:
                        activity.log_create(amo.LOG.REMOVE_FROM_COLLECTION,
                                            (Addon, addon), self)
            if add:
                insert = '(%s, %s, %s, NOW(), NOW())'
                values = [insert % (a, self.id, idx) for a, idx in add]
                cursor.execute("""
                    INSERT INTO addons_collections
                        (addon_id, collection_id, ordering, created, modified)
                    VALUES %s""" % ','.join(values))
                if self.listed:
                    for addon_id, idx in add:
                        activity.log_create(amo.LOG.ADD_TO_COLLECTION,
                                            (Addon, addon_id), self)
        # Re-order the add-ons that were already in the collection.
        for addon, ordering in update:
            (CollectionAddon.objects.filter(
                collection=self.id, addon=addon).update(ordering=ordering,
                                                        modified=now))

        # Attach per-add-on comments, silently skipping ids that are not
        # (or no longer) in the collection.
        for addon, comment in six.iteritems(comments):
            try:
                c = (CollectionAddon.objects.using('default').get(
                    collection=self.id, addon=addon))
            except CollectionAddon.DoesNotExist:
                pass
            else:
                c.comments = comment
                c.save(force_update=True)

        self.save()

    def add_addon(self, addon):
        "Adds an addon to the collection."
        CollectionAddon.objects.get_or_create(addon=addon, collection=self)
        if self.listed:
            activity.log_create(amo.LOG.ADD_TO_COLLECTION, addon, self)
        self.save()  # To invalidate Collection.

    def remove_addon(self, addon):
        """Remove an add-on from the collection."""
        CollectionAddon.objects.filter(addon=addon, collection=self).delete()
        if self.listed:
            activity.log_create(amo.LOG.REMOVE_FROM_COLLECTION, addon, self)
        self.save()  # To invalidate Collection.

    def owned_by(self, user):
        """True if `user` is the collection's author."""
        return user.id == self.author_id

    def can_view_stats(self, request):
        """True if the requesting user owns the collection or holds the
        COLLECTION_STATS_VIEW permission."""
        if request and request.user:
            return (self.owned_by(request.user) or acl.action_allowed(
                request, amo.permissions.COLLECTION_STATS_VIEW))
        return False

    def is_public(self):
        return self.listed

    def is_featured(self):
        return FeaturedCollection.objects.filter(collection=self).exists()

    @staticmethod
    def transformer(collections):
        """Queryset transformer: attach author UserProfile objects to a
        batch of collections with a single query."""
        if not collections:
            return
        author_ids = set(c.author_id for c in collections)
        authors = dict(
            (u.id, u) for u in UserProfile.objects.filter(id__in=author_ids))
        for c in collections:
            c.author = authors.get(c.author_id)

    @staticmethod
    def post_save(sender, instance, **kwargs):
        """post_save signal handler: refresh denormalized metadata and,
        for featured collections, re-index the affected add-ons."""
        from . import tasks
        # `raw` is set for fixture loading; skip side effects then.
        if kwargs.get('raw'):
            return
        tasks.collection_meta.delay(instance.id)
        if instance.is_featured():
            Collection.update_featured_status(sender, instance, **kwargs)

    @staticmethod
    def post_delete(sender, instance, **kwargs):
        """post_delete signal handler: re-index add-ons of featured
        collections."""
        if kwargs.get('raw'):
            return
        if instance.is_featured():
            Collection.update_featured_status(sender, instance, **kwargs)

    @staticmethod
    def update_featured_status(sender, instance, **kwargs):
        """Invalidate the featured-ids cache and queue re-indexing for the
        collection's add-ons (or an explicit `addons` kwarg)."""
        from olympia.addons.tasks import index_addons
        addons = kwargs.get('addons',
                            [addon.id for addon in instance.addons.all()])
        if addons:
            clear_get_featured_ids_cache(None, None)
            index_addons.delay(addons)

    def check_ownership(self, request, require_owner, require_author,
                        ignore_disabled, admin):
        """
        Used by acl.check_ownership to see if request.user has permissions for
        the collection.
        """
        from olympia.access import acl
        return acl.check_collection_ownership(request, self, require_owner)
Ejemplo n.º 28
0
class File(OnChangeMixin, ModelBase):
    """A single uploaded file (XPI) belonging to a Version.

    Tracks the file's hashes, review status, signing state and various
    validator-derived flags, and knows where the file lives on disk
    (public vs. guarded path depending on status).
    """
    id = PositiveAutoField(primary_key=True)
    STATUS_CHOICES = amo.STATUS_CHOICES_FILE

    version = models.ForeignKey('versions.Version',
                                related_name='files',
                                on_delete=models.CASCADE)
    filename = models.CharField(max_length=255, default='')
    size = models.PositiveIntegerField(default=0)  # In bytes.
    hash = models.CharField(max_length=255, default='')
    # The original hash of the file, before we sign it, or repackage it in
    # any other way.
    original_hash = models.CharField(max_length=255, default='')
    status = models.PositiveSmallIntegerField(
        choices=STATUS_CHOICES.items(), default=amo.STATUS_AWAITING_REVIEW)
    datestatuschanged = models.DateTimeField(null=True, auto_now_add=True)
    is_restart_required = models.BooleanField(default=False)
    strict_compatibility = models.BooleanField(default=False)
    reviewed = models.DateTimeField(null=True, blank=True)
    # The `binary` field is used to store the flags from amo-validator when it
    # finds files with binary extensions or files that may contain binary
    # content.
    binary = models.BooleanField(default=False)
    # The `binary_components` field is used to store the flag from
    # amo-validator when it finds "binary-components" in the chrome manifest
    # file, used for default to compatible.
    binary_components = models.BooleanField(default=False)
    # Serial number of the certificate use for the signature.
    cert_serial_num = models.TextField(blank=True)
    # Is the file signed by Mozilla?
    is_signed = models.BooleanField(default=False)
    # Is the file an experiment (see bug 1220097)?
    is_experiment = models.BooleanField(default=False)
    # Is the file a WebExtension?
    is_webextension = models.BooleanField(default=False)
    # Is the file a special "Mozilla Signed Extension"
    # see https://wiki.mozilla.org/Add-ons/InternalSigning
    is_mozilla_signed_extension = models.BooleanField(default=False)
    # The user has disabled this file and this was its status.
    # STATUS_NULL means the user didn't disable the File - i.e. Mozilla did.
    original_status = models.PositiveSmallIntegerField(default=amo.STATUS_NULL)

    class Meta(ModelBase.Meta):
        db_table = 'files'
        indexes = [
            models.Index(fields=('created', 'version'), name='created_idx'),
            models.Index(fields=('binary_components', ),
                         name='files_cedd2560'),
            models.Index(fields=('datestatuschanged', 'version'),
                         name='statuschanged_idx'),
            models.Index(fields=('status', ), name='status'),
        ]

    def __str__(self):
        return str(self.id)

    @property
    def has_been_validated(self):
        """True if a FileValidation row exists for this file."""
        try:
            self.validation
        except FileValidation.DoesNotExist:
            return False
        else:
            return True

    @property
    def automated_signing(self):
        """True if this file is eligible for automated signing, i.e. its
        version is in the unlisted channel."""
        return self.version.channel == amo.RELEASE_CHANNEL_UNLISTED

    def get_file_cdn_url(self, attachment=False):
        """Return the URL for the file corresponding to this instance
        on the CDN."""
        if attachment:
            host = posixpath.join(user_media_url('addons'), '_attachments')
        else:
            host = user_media_url('addons')

        # NOTE(review): every component is coerced with force_bytes, so on
        # Python 3 this returns a bytes URL -- confirm callers expect that.
        return posixpath.join(
            *map(force_bytes, [host, self.version.addon.id, self.filename]))

    def get_url_path(self, attachment=False):
        """Return the download URL path for this file."""
        return self._make_download_url('downloads.file', attachment=attachment)

    def _make_download_url(self, view_name, attachment=False):
        # Build /<view url>/<filename>; `type=attachment` forces a
        # content-disposition download in the view.
        kwargs = {'file_id': self.pk}
        if attachment:
            kwargs['type'] = 'attachment'
        url = os.path.join(reverse(view_name, kwargs=kwargs), self.filename)
        return url

    @classmethod
    def from_upload(cls, upload, version, parsed_data=None):
        """
        Create a File instance from a FileUpload, a Version and the parsed_data
        generated by parse_addon().

        Note that it's the caller's responsability to ensure the file is valid.
        We can't check for that here because an admin may have overridden the
        validation results."""
        assert parsed_data is not None

        file_ = cls(version=version)
        upload_path = force_str(nfd_str(upload.path))
        ext = force_str(os.path.splitext(upload_path)[1])
        file_.filename = file_.generate_filename(extension=ext or '.xpi')
        # Size in bytes.
        file_.size = storage.size(upload_path)
        file_.is_restart_required = parsed_data.get('is_restart_required',
                                                    False)
        file_.strict_compatibility = parsed_data.get('strict_compatibility',
                                                     False)
        file_.is_experiment = parsed_data.get('is_experiment', False)
        file_.is_webextension = parsed_data.get('is_webextension', False)
        file_.is_mozilla_signed_extension = parsed_data.get(
            'is_mozilla_signed_extension', False)

        # Hash the upload before signing/repackaging can alter it.
        file_.hash = file_.generate_hash(upload_path)
        file_.original_hash = file_.hash
        file_.save()

        if file_.is_webextension:
            permissions = list(parsed_data.get('permissions', []))
            optional_permissions = list(
                parsed_data.get('optional_permissions', []))

            # devtools_page isn't in permissions block but treated as one
            # if a custom devtools page is added by an addon
            if 'devtools_page' in parsed_data:
                permissions.append('devtools')

            # Add content_scripts host matches too.
            for script in parsed_data.get('content_scripts', []):
                permissions.extend(script.get('matches', []))
            if permissions or optional_permissions:
                WebextPermission.objects.create(
                    permissions=permissions,
                    optional_permissions=optional_permissions,
                    file=file_,
                )

        log.info('New file: %r from %r' % (file_, upload))

        # Move the uploaded file from the temp location.
        copy_stored_file(upload_path, file_.current_file_path)

        if upload.validation:
            validation = json.loads(upload.validation)
            FileValidation.from_json(file_, validation)

        return file_

    def generate_hash(self, filename=None):
        """Generate a hash for a file."""
        with open(filename or self.current_file_path, 'rb') as fobj:
            return 'sha256:{}'.format(get_sha256(fobj))

    def generate_filename(self, extension=None):
        """
        Files are in the format of:
        {addon_name}-{version}-{apps}
        (-{platform} for some of the old ones from back when we had multiple
         platforms)
        """
        parts = []
        addon = self.version.addon
        # slugify drops unicode so we may end up with an empty string.
        # Apache did not like serving unicode filenames (bug 626587).
        extension = extension or '.xpi'
        name = slugify(addon.name).replace('-', '_') or 'addon'
        parts.append(name)
        parts.append(self.version.version)

        if addon.type not in amo.NO_COMPAT and self.version.compatible_apps:
            apps = '+'.join(
                sorted([a.shortername for a in self.version.compatible_apps]))
            parts.append(apps)

        self.filename = '-'.join(parts) + extension
        return self.filename

    # NOTE(review): the slug character class is [a-z0-7_] -- '0-7' looks
    # like a typo for '0-9'; a filename containing '8' or '9' would cut
    # the slug group short.  Confirm intent before changing.
    _pretty_filename = re.compile(r'(?P<slug>[a-z0-7_]+)(?P<suffix>.*)')

    def pretty_filename(self, maxlen=20):
        """Displayable filename.

        Truncates filename so that the slug part fits maxlen.
        """
        m = self._pretty_filename.match(self.filename)
        if not m:
            return self.filename
        if len(m.group('slug')) < maxlen:
            return self.filename
        return '%s...%s' % (m.group('slug')[0:(maxlen - 3)], m.group('suffix'))

    def latest_xpi_url(self, attachment=False):
        """Return the 'downloads.latest' URL for this file's add-on."""
        addon = self.version.addon
        kw = {'addon_id': addon.slug}
        if attachment:
            kw['type'] = 'attachment'
        return os.path.join(
            reverse('downloads.latest', kwargs=kw),
            'addon-%s-latest%s' % (addon.pk, self.extension),
        )

    @property
    def file_path(self):
        """Path of the file under the public addons media root."""
        return os.path.join(user_media_path('addons'),
                            str(self.version.addon_id), self.filename)

    @property
    def addon(self):
        """Shortcut to the add-on this file's version belongs to."""
        return self.version.addon

    @property
    def guarded_file_path(self):
        """Path of the file under the guarded (non-public) media root."""
        return os.path.join(user_media_path('guarded_addons'),
                            str(self.version.addon_id), self.filename)

    @property
    def current_file_path(self):
        """Returns the current path of the file, whether or not it is
        guarded."""

        file_disabled = self.status == amo.STATUS_DISABLED
        addon_disabled = self.addon.is_disabled
        if file_disabled or addon_disabled:
            return self.guarded_file_path
        else:
            return self.file_path

    @property
    def fallback_file_path(self):
        """Fallback path in case the file was disabled/re-enabled and not yet
        moved - sort of the opposite to current_file_path. This should only be
        used for things like code search or git extraction where we really want
        the file contents no matter what."""
        return (self.file_path if self.current_file_path
                == self.guarded_file_path else self.guarded_file_path)

    @property
    def extension(self):
        """File extension including the leading dot, e.g. '.xpi'."""
        return os.path.splitext(self.filename)[-1]

    def move_file(self, source_path, destination_path, log_message):
        """Move a file from `source_path` to `destination_path` and delete the
        source directory if it's empty once the file has been successfully
        moved.

        Meant to move files from/to the guarded file path as they are disabled
        or re-enabled.

        IOError and UnicodeEncodeError are caught and logged."""
        log_message = force_str(log_message)
        try:
            if storage.exists(source_path):
                source_parent_path = os.path.dirname(source_path)
                log.info(
                    log_message.format(source=source_path,
                                       destination=destination_path))
                move_stored_file(source_path, destination_path)
                # Now that the file has been deleted, remove the directory if
                # it exists to prevent the main directory from growing too
                # much (#11464)
                remaining_dirs, remaining_files = storage.listdir(
                    source_parent_path)
                if len(remaining_dirs) == len(remaining_files) == 0:
                    storage.delete(source_parent_path)
        except (UnicodeEncodeError, IOError):
            # Best-effort: a failed move is logged, not raised.
            msg = 'Move Failure: {} {}'.format(source_path, destination_path)
            log.exception(msg)

    def hide_disabled_file(self):
        """Move a file from the public path to the guarded file path."""
        if not self.filename:
            return
        src, dst = self.file_path, self.guarded_file_path
        self.move_file(src, dst,
                       'Moving disabled file: {source} => {destination}')

    def unhide_disabled_file(self):
        """Move a file from guarded file path to the public file path."""
        if not self.filename:
            return
        src, dst = self.guarded_file_path, self.file_path
        self.move_file(src, dst,
                       'Moving undisabled file: {source} => {destination}')

    @cached_property
    def permissions(self):
        """Ordered, de-duplicated list of manifest permission strings."""
        if not self.is_webextension:
            return []
        try:
            # Filter out any errant non-strings included in the manifest JSON.
            # Remove any duplicate permissions.
            permissions = set()
            permissions = [
                p for p in self._webext_permissions.permissions
                if isinstance(p, str)
                and not (p in permissions or permissions.add(p))
            ]
            return permissions

        except WebextPermission.DoesNotExist:
            return []

    @cached_property
    def optional_permissions(self):
        """Ordered, de-duplicated list of optional permission strings."""
        if not self.is_webextension:
            return []
        try:
            # Filter out any errant non-strings included in the manifest JSON.
            # Remove any duplicate optional permissions.
            permissions = set()
            permissions = [
                p for p in self._webext_permissions.optional_permissions
                if isinstance(p, str)
                and not (p in permissions or permissions.add(p))
            ]
            return permissions

        except WebextPermission.DoesNotExist:
            return []
Ejemplo n.º 29
0
class Collection(ModelBase):
    """A user-curated collection of add-ons (newer, slimmed-down revision:
    no type/nickname/application fields, no featured-collection handling).

    Membership lives in the `CollectionAddon` through-model.
    """
    id = PositiveAutoField(primary_key=True)

    # Public identifier; generated automatically in save() when missing.
    uuid = models.UUIDField(blank=True, unique=True, null=True)
    name = TranslatedField(require_locale=False)
    # Unique per author (see Meta constraints); normalized in clean_slug().
    slug = models.CharField(max_length=30, blank=True, null=True)

    # description can (and sometimes does) contain html and other unsanitized
    # content. It must be cleaned before display - NoURLsField just strips the
    # URL without doing any escaping.
    description = NoURLsField(require_locale=False)
    default_locale = models.CharField(
        max_length=10, default='en-US', db_column='defaultlocale'
    )
    listed = models.BooleanField(
        default=True, help_text='Collections are either listed or private.'
    )

    # Denormalized add-on count, refreshed by the collection_meta task
    # queued from post_save() below.
    addon_count = models.PositiveIntegerField(default=0, db_column='addonCount')

    addons = models.ManyToManyField(
        Addon, through='CollectionAddon', related_name='collections'
    )
    author = models.ForeignKey(
        UserProfile, null=True, related_name='collections', on_delete=models.CASCADE
    )

    objects = CollectionManager()

    class Meta(ModelBase.Meta):
        db_table = 'collections'
        indexes = [
            models.Index(fields=('created',), name='collections_created_idx'),
            models.Index(fields=('listed',), name='collections_listed_idx'),
            models.Index(fields=('slug',), name='collections_slug_idx'),
        ]
        constraints = [
            models.UniqueConstraint(fields=('author', 'slug'), name='author_id'),
        ]

    def __str__(self):
        return f'{self.name} ({self.addon_count})'

    def save(self, **kw):
        """Fill in uuid/slug defaults and de-duplicate the slug, then save."""
        if not self.uuid:
            self.uuid = uuid.uuid4()
        if not self.slug:
            # Work with both, strings (if passed manually on .create()
            # and UUID instances)
            self.slug = str(self.uuid).replace('-', '')[:30]
        self.clean_slug()

        super().save(**kw)

    def clean_slug(self):
        """Avoid slug clashes with this author's other collections by
        appending a numeric suffix when needed."""
        if not self.author:
            return

        # Read from the default (writer) db to avoid replica lag.
        qs = self.author.collections.using('default')
        slugs = {slug: id for slug, id in qs.values_list('slug', 'id')}
        if self.slug in slugs and slugs[self.slug] != self.id:
            # Try slug-1, slug-2, ...  With len(slugs) candidates and at
            # most len(slugs) - 1 possible clashes, one is always free.
            for idx in range(len(slugs)):
                new = f'{self.slug}-{idx + 1}'
                if new not in slugs:
                    self.slug = new
                    return

    def get_url_path(self):
        return reverse('collections.detail', args=[self.author_id, self.slug])

    @classmethod
    def get_fallback(cls):
        # Fallback locale field, presumably consumed by the translations
        # machinery behind TranslatedField -- confirm against its callers.
        return cls._meta.get_field('default_locale')

    def add_addon(self, addon):
        """Add an add-on to the collection (no-op if already present)."""
        CollectionAddon.objects.get_or_create(addon=addon, collection=self)

    def remove_addon(self, addon):
        """Remove an add-on from the collection."""
        CollectionAddon.objects.filter(addon=addon, collection=self).delete()

    def owned_by(self, user):
        """True if `user` is the collection's author."""
        return user.id == self.author_id

    def is_public(self):
        return self.listed

    @staticmethod
    def transformer(collections):
        """Queryset transformer: attach author UserProfile objects to a
        batch of collections with a single query."""
        if not collections:
            return
        author_ids = {c.author_id for c in collections}
        authors = {u.id: u for u in UserProfile.objects.filter(id__in=author_ids)}
        for c in collections:
            c.author = authors.get(c.author_id)

    @staticmethod
    def post_save(sender, instance, **kwargs):
        """post_save signal handler: queue a refresh of denormalized
        collection metadata (skipped for raw fixture loads)."""
        from . import tasks

        if kwargs.get('raw'):
            return
        tasks.collection_meta.delay(instance.id)

    def check_ownership(
        self, request, require_owner, require_author, ignore_disabled, admin
    ):
        """
        Used by acl.check_ownership to see if request.user has permissions for
        the collection.
        """
        from olympia.access import acl

        return acl.check_collection_ownership(request, self, require_owner)
Ejemplo n.º 30
0
class File(OnChangeMixin, ModelBase):
    """A single uploaded file (XPI) belonging to a Version of an add-on."""

    id = PositiveAutoField(primary_key=True)
    STATUS_CHOICES = amo.STATUS_CHOICES_FILE

    version = models.ForeignKey('versions.Version',
                                related_name='files',
                                on_delete=models.CASCADE)
    platform = models.PositiveIntegerField(
        choices=amo.SUPPORTED_PLATFORMS_CHOICES,
        default=amo.PLATFORM_ALL.id,
        db_column="platform_id")
    # Generated by generate_filename() from the add-on name, version, apps
    # and platform.
    filename = models.CharField(max_length=255, default='')
    size = models.PositiveIntegerField(default=0)  # In bytes.
    # Current sha256 hash (see generate_hash()), possibly post-signing.
    hash = models.CharField(max_length=255, default='')
    # The original hash of the file, before we sign it, or repackage it in
    # any other way.
    original_hash = models.CharField(max_length=255, default='')
    # SDK version read from the package metadata (see get_jetpack_metadata).
    jetpack_version = models.CharField(max_length=10, null=True, blank=True)
    status = models.PositiveSmallIntegerField(
        choices=STATUS_CHOICES.items(), default=amo.STATUS_AWAITING_REVIEW)
    # Set automatically on creation (auto_now_add).
    datestatuschanged = models.DateTimeField(null=True, auto_now_add=True)
    is_restart_required = models.BooleanField(default=False)
    strict_compatibility = models.BooleanField(default=False)
    # The XPI contains JS that calls require("chrome").
    requires_chrome = models.BooleanField(default=False)
    reviewed = models.DateTimeField(null=True, blank=True)
    # The `binary` field is used to store the flags from amo-validator when it
    # finds files with binary extensions or files that may contain binary
    # content.
    binary = models.BooleanField(default=False)
    # The `binary_components` field is used to store the flag from
    # amo-validator when it finds "binary-components" in the chrome manifest
    # file, used for default to compatible.
    binary_components = models.BooleanField(default=False, db_index=True)
    # Serial number of the certificate use for the signature.
    cert_serial_num = models.TextField(blank=True)
    # Is the file signed by Mozilla?
    is_signed = models.BooleanField(default=False)
    # Is the file a multi-package?
    #     https://developer.mozilla.org/en-US/docs/Multiple_Item_Packaging
    is_multi_package = models.BooleanField(default=False)
    # Is the file an experiment (see bug 1220097)?
    is_experiment = models.BooleanField(default=False)
    # Is the file a WebExtension?
    is_webextension = models.BooleanField(default=False)
    # Is the file a special "Mozilla Signed Extension"
    # see https://wiki.mozilla.org/Add-ons/InternalSigning
    is_mozilla_signed_extension = models.BooleanField(default=False)
    # The user has disabled this file and this was its status.
    # STATUS_NULL means the user didn't disable the File - i.e. Mozilla did.
    original_status = models.PositiveSmallIntegerField(default=amo.STATUS_NULL)

    class Meta(ModelBase.Meta):
        db_table = 'files'

    def __unicode__(self):
        """Return the file's primary key as its string form.

        Bug fix: `unicode` does not exist on Python 3 (this module already
        uses f-strings and Python 3 `str` checks elsewhere), so calling this
        raised NameError. Use the builtin `str` instead.
        NOTE(review): on Python 3 only `__str__` is consulted by str(); the
        name is kept unchanged here to preserve the interface.
        """
        return str(self.id)

    def get_platform_display(self):
        """Human-readable name of this file's platform."""
        platform = amo.PLATFORMS[self.platform]
        return force_text(platform.name)

    @property
    def has_been_validated(self):
        """Whether a FileValidation row exists for this file."""
        try:
            self.validation
        except FileValidation.DoesNotExist:
            return False
        return True

    @property
    def automated_signing(self):
        """True if this file is eligible for automated signing.

        Currently this means its version is in the unlisted channel.
        """
        return self.version.channel == amo.RELEASE_CHANNEL_UNLISTED

    def get_file_cdn_url(self, attachment=False):
        """Return the URL for the file corresponding to this instance
        on the CDN.

        When `attachment` is True the file is served from the special
        `_attachments` directory instead.
        """
        host = user_media_url('addons')
        if attachment:
            host = posixpath.join(host, '_attachments')
        # NOTE(review): every component is coerced to bytes before joining,
        # so the returned URL is a bytes object — confirm callers expect it.
        pieces = (host, self.version.addon.id, self.filename)
        return posixpath.join(*(force_bytes(piece) for piece in pieces))

    def get_url_path(self, src, attachment=False):
        """Absolute download URL for this file (tagged with `src`)."""
        return self._make_download_url(
            'downloads.file', src, attachment=attachment)

    def _make_download_url(self, view_name, src, attachment=False):
        """Build an absolute download URL for `view_name` with a src tag."""
        url_kwargs = {'file_id': self.pk}
        if attachment:
            url_kwargs['type'] = 'attachment'
        base = reverse(view_name, kwargs=url_kwargs)
        url = os.path.join(base, self.filename)
        return absolutify(urlparams(url, src=src))

    @classmethod
    def from_upload(cls, upload, version, platform, parsed_data=None):
        """
        Create a File instance from a FileUpload, a Version, a platform id
        and the parsed_data generated by parse_addon().

        Note that it's the caller's responsibility to ensure the file is
        valid. We can't check for that here because an admin may have
        overridden the validation results."""
        assert parsed_data is not None

        file_ = cls(version=version, platform=platform)
        upload.path = force_bytes(nfd_str(upload.path))
        # os.path.splitext() on a bytes path yields bytes; decode so the
        # '.jar' comparison below can match and so generate_filename()'s
        # str concatenation ('-'.join(parts) + extension) doesn't get a
        # bytes extension on Python 3.
        ext = force_text(os.path.splitext(upload.path)[1])
        if ext == '.jar':
            ext = '.xpi'
        file_.filename = file_.generate_filename(extension=ext or '.xpi')
        # Size in bytes.
        file_.size = storage.size(upload.path)
        data = cls.get_jetpack_metadata(upload.path)
        if 'sdkVersion' in data and data['sdkVersion']:
            # jetpack_version column is 10 chars wide.
            file_.jetpack_version = data['sdkVersion'][:10]
        file_.is_restart_required = parsed_data.get('is_restart_required',
                                                    False)
        file_.strict_compatibility = parsed_data.get('strict_compatibility',
                                                     False)
        file_.is_multi_package = parsed_data.get('is_multi_package', False)
        file_.is_experiment = parsed_data.get('is_experiment', False)
        file_.is_webextension = parsed_data.get('is_webextension', False)
        file_.is_mozilla_signed_extension = parsed_data.get(
            'is_mozilla_signed_extension', False)

        file_.hash = file_.generate_hash(upload.path)
        file_.original_hash = file_.hash

        if upload.validation:
            validation = json.loads(upload.validation)
            # `metadata` may be absent from the validation results —
            # FileValidation.from_json guards against that too — so don't
            # assume the key exists.
            if validation.get('metadata', {}).get('requires_chrome'):
                file_.requires_chrome = True

        file_.save()
        if file_.is_webextension:
            permissions = list(parsed_data.get('permissions', []))
            # Add content_scripts host matches too.
            for script in parsed_data.get('content_scripts', []):
                permissions.extend(script.get('matches', []))
            if permissions:
                WebextPermission.objects.create(permissions=permissions,
                                                file=file_)

        log.debug('New file: %r from %r' % (file_, upload))

        # Move the uploaded file from the temp location.
        copy_stored_file(upload.path, file_.current_file_path)

        if upload.validation:
            FileValidation.from_json(file_, validation)

        return file_

    @classmethod
    def get_jetpack_metadata(cls, path):
        """Return SDK (jetpack) metadata for the archive at `path`.

        Returns a dict with an `sdkVersion` key: "jpm" when a package.json
        is present, the sdkVersion found in harness-options.json for older
        SDK add-ons, or None when `path` is not a zip/XPI or carries no SDK
        metadata.
        """
        data = {'sdkVersion': None}
        try:
            zip_ = zipfile.ZipFile(path)
        except (zipfile.BadZipfile, IOError):
            # This path is not an XPI. It's probably an app manifest.
            return data
        # Bug fix: the archive handle was never closed; use the ZipFile as
        # a context manager so it is released on every exit path.
        with zip_:
            if 'package.json' in zip_.namelist():
                data['sdkVersion'] = "jpm"
            else:
                name = 'harness-options.json'
                if name in zip_.namelist():
                    try:
                        opts = json.load(zip_.open(name))
                    except ValueError as exc:
                        log.info(
                            'Could not parse harness-options.json in %r: %s' %
                            (path, exc))
                    else:
                        data['sdkVersion'] = opts.get('sdkVersion')
        return data

    def generate_hash(self, filename=None):
        """Generate a sha256 hash for a file.

        Reads `filename` (defaulting to this file's current path) in 1 KiB
        chunks and returns 'sha256:<hexdigest>'.
        """
        sha = hashlib.sha256()
        with open(filename or self.current_file_path, 'rb') as obj:
            # Bug fix: the file is opened in binary mode, so EOF is b''.
            # The old sentinel was the str '' which never equals b'' on
            # Python 3, making this loop spin forever at end of file.
            for chunk in iter(lambda: obj.read(1024), b''):
                sha.update(chunk)
        return 'sha256:%s' % sha.hexdigest()

    def generate_filename(self, extension=None):
        """
        Files are in the format of:
        {addon_name}-{version}-{apps}-{platform}
        """
        addon = self.version.addon
        extension = extension or '.xpi'
        # slugify drops unicode so we may end up with an empty string.
        # Apache did not like serving unicode filenames (bug 626587).
        slug = slugify(addon.name).replace('-', '_') or 'addon'
        parts = [slug, self.version.version]

        if addon.type not in amo.NO_COMPAT and self.version.compatible_apps:
            shortnames = sorted(
                app.shortername for app in self.version.compatible_apps)
            parts.append('+'.join(shortnames))

        if self.platform and self.platform != amo.PLATFORM_ALL.id:
            parts.append(amo.PLATFORMS[self.platform].shortname)

        self.filename = '-'.join(parts) + extension
        return self.filename

    # Bug fix: the class previously read [a-z0-7_], which cut the slug group
    # short at any '8' or '9' even though generate_filename() keeps all
    # digits from the slugified add-on name; use the full 0-9 range.
    _pretty_filename = re.compile(r'(?P<slug>[a-z0-9_]+)(?P<suffix>.*)')

    def pretty_filename(self, maxlen=20):
        """Displayable filename.

        Truncates filename so that the slug part fits maxlen.
        """
        m = self._pretty_filename.match(self.filename)
        if not m:
            return self.filename
        if len(m.group('slug')) < maxlen:
            return self.filename
        return u'%s...%s' % (m.group('slug')[0:(maxlen - 3)],
                             m.group('suffix'))

    def latest_xpi_url(self, attachment=False):
        """URL that always resolves to the add-on's latest file."""
        addon = self.version.addon
        url_kwargs = {'addon_id': addon.slug}
        if self.platform != amo.PLATFORM_ALL.id:
            url_kwargs['platform'] = self.platform
        if attachment:
            url_kwargs['type'] = 'attachment'
        filename = 'addon-%s-latest%s' % (addon.pk, self.extension)
        base = reverse('downloads.latest', kwargs=url_kwargs)
        return os.path.join(base, filename)

    def eula_url(self):
        """URL of the EULA page for this file's add-on."""
        url_args = [self.version.addon_id, self.id]
        return reverse('addons.eula', args=url_args)

    @property
    def file_path(self):
        """Path of the file under the regular add-ons media directory."""
        return os.path.join(
            user_media_path('addons'),
            str(self.version.addon_id),
            self.filename)

    @property
    def addon(self):
        """Shortcut to the add-on that owns this file's version."""
        return self.version.addon

    @property
    def guarded_file_path(self):
        """Path of the file under the guarded (disabled) media directory."""
        return os.path.join(
            user_media_path('guarded_addons'),
            str(self.version.addon_id),
            self.filename)

    @property
    def current_file_path(self):
        """Returns the current path of the file, whether or not it is
        guarded."""
        # Evaluate both flags (self.addon is a property lookup) and pick
        # the guarded location when either the file or the add-on is
        # disabled.
        file_disabled = self.status == amo.STATUS_DISABLED
        addon_disabled = self.addon.is_disabled
        return (self.guarded_file_path
                if file_disabled or addon_disabled
                else self.file_path)

    @property
    def extension(self):
        """The filename's extension, including the leading dot."""
        _, ext = os.path.splitext(self.filename)
        return ext

    def move_file(self, source, destination, log_message):
        """Move a file from `source` to `destination`."""
        # Make sure we are passing bytes to Python's io system.
        source = force_bytes(source)
        destination = force_bytes(destination)

        try:
            if storage.exists(source):
                log.info(
                    log_message.format(source=source, destination=destination))
                move_stored_file(source, destination)
        except (UnicodeEncodeError, IOError):
            # Best effort: log the failure instead of raising.
            log.exception('Move Failure: {} {}'.format(
                force_bytes(source), force_bytes(destination)))

    def hide_disabled_file(self):
        """Move a disabled file to the guarded file path."""
        if not self.filename:
            return
        self.move_file(
            self.file_path, self.guarded_file_path,
            'Moving disabled file: {source} => {destination}')

    def unhide_disabled_file(self):
        """Move a re-enabled file back from the guarded file path."""
        if not self.filename:
            return
        self.move_file(
            self.guarded_file_path, self.file_path,
            'Moving undisabled file: {source} => {destination}')

    # Raw string fix: '\w' and '\-' are invalid escape sequences in a plain
    # str literal and warn on Python 3.6+; the compiled pattern itself is
    # unchanged.
    _get_localepicker = re.compile(r'^locale browser ([\w\-_]+) (.*)$', re.M)

    @memoize(prefix='localepicker', timeout=None)
    def get_localepicker(self):
        """
        For a file that is part of a language pack, extract
        the chrome/localepicker.properties file and return as
        a string.

        Returns '' on any failure (not a zip, no chrome.manifest, no locale
        browser entry, or extraction error).
        """
        start = time.time()

        try:
            # Renamed from `zip` to avoid shadowing the builtin.
            zip_file = SafeZip(self.file_path)
        except (zipfile.BadZipfile, IOError):
            return ''

        try:
            manifest = zip_file.read('chrome.manifest')
        except KeyError:
            log.info('No file named: chrome.manifest in file: %s' % self.pk)
            return ''

        res = self._get_localepicker.search(manifest)
        if not res:
            log.error('Locale browser not in chrome.manifest: %s' % self.pk)
            return ''

        try:
            # Second capture group is the locale path from the manifest.
            p = res.groups()[1]
            if 'localepicker.properties' not in p:
                p = os.path.join(p, 'localepicker.properties')
            res = zip_file.extract_from_manifest(p)
        except (zipfile.BadZipfile, IOError) as e:
            log.error('Error unzipping: %s, %s in file: %s' % (p, e, self.pk))
            return ''
        except (ValueError, KeyError) as e:
            log.error('No file named: %s in file: %s' % (e, self.pk))
            return ''

        end = time.time() - start
        log.info('Extracted localepicker file: %s in %.2fs' % (self.pk, end))
        statsd.timing('files.extract.localepicker', (end * 1000))
        return res

    @property
    def webext_permissions(self):
        """Return permissions that should be displayed, with descriptions, in
        defined order:
        1) Either the match all permission, if present (e.g. <all-urls>), or
           match urls for sites (<all-urls> takes preference over match urls)
        2) nativeMessaging permission, if present
        3) other known permissions in alphabetical order

        NOTE(review): the 'alphabetical order' of (3) relies on
        WebextPermissionDescription's default queryset ordering — confirm.
        """
        # Descriptions for the permissions we know about.
        knowns = list(
            WebextPermissionDescription.objects.filter(
                name__in=self.webext_permissions_list))

        urls = []
        match_url = None
        for name in self.webext_permissions_list:
            if re.match(WebextPermissionDescription.MATCH_ALL_REGEX, name):
                match_url = WebextPermissionDescription.ALL_URLS_PERMISSION
            elif name == WebextPermission.NATIVE_MESSAGING_NAME:
                # Move nativeMessaging to front of the list
                for index, perm in enumerate(knowns):
                    if perm.name == WebextPermission.NATIVE_MESSAGING_NAME:
                        knowns.pop(index)
                        knowns.insert(0, perm)
                        break
            elif '//' in name:
                # Filter out match urls so we can group them.
                urls.append(name)
            # Other strings are unknown permissions we don't care about

        # Synthesize a single display entry for host permissions when there
        # is no <all-urls>-style match.
        if match_url is None and len(urls) == 1:
            match_url = Permission(
                u'single-match',
                ugettext(u'Access your data for {name}').format(name=urls[0]))
        elif match_url is None and len(urls) > 1:
            # Site names are escaped individually, so marking the assembled
            # markup safe does not inject user-controlled HTML.
            details = (u'<details><summary>{copy}</summary><ul>{sites}</ul>'
                       u'</details>')
            copy = ugettext(u'Access your data on the following websites:')
            sites = ''.join(
                [u'<li>%s</li>' % jinja2_escape(name) for name in urls])
            match_url = Permission(
                u'multiple-match',
                mark_safe(details.format(copy=copy, sites=sites)))

        return ([match_url] if match_url else []) + knowns

    @cached_property
    def webext_permissions_list(self):
        """Deduplicated list of permission strings from the manifest.

        Returns [] for non-webextensions and when no WebextPermission row
        exists. Non-string entries in the manifest JSON are dropped and
        duplicates removed while preserving the original order.
        """
        if not self.is_webextension:
            return []
        try:
            # Bug fix: the original checked isinstance(p, basestring), but
            # `basestring` is Python 2 only and is undefined in this module
            # (which already uses f-strings), so this raised NameError.
            seen = set()
            permissions = []
            for perm in self._webext_permissions.permissions:
                if isinstance(perm, str) and perm not in seen:
                    seen.add(perm)
                    permissions.append(perm)
            return permissions
        except WebextPermission.DoesNotExist:
            return []