Example #1
    def __init__(self, *args, **kw):
        super(NewWebappForm, self).__init__(*args, **kw)
        if not waffle.switch_is_active("allow-b2g-paid-submission"):
            del self.fields["paid"]

        if not waffle.switch_is_active("allow-packaged-app-uploads"):
            del self.fields["packaged"]
Example #2
def _resize_video(src, instance, lib=None, **kw):
    """
    Given a preview object and a file somewhere: encode into the full
    preview size and generate a thumbnail.
    """
    log.info('[1@None] Encoding video %s' % instance.pk)
    lib = lib or library
    if not lib:
        log.info('Video library not available for %s' % instance.pk)
        return

    video = lib(src)
    video.get_meta()
    if not video.is_valid():
        log.info('Video is not valid for %s' % instance.pk)
        return

    if waffle.switch_is_active('video-encode'):
        # Do the video encoding.
        try:
            video_file = video.get_encoded(mkt.ADDON_PREVIEW_SIZES[1])
        except Exception:
            log.info('Error encoding video for %s, %s' %
                     (instance.pk, video.meta), exc_info=True)
            return

    # Do the thumbnail next; this will be the signal that the
    # encoding has finished.
    try:
        thumbnail_file = video.get_screenshot(mkt.ADDON_PREVIEW_SIZES[0])
    except Exception:
        # We'll have this file floating around because the video
        # encoded successfully, or something has gone wrong in which case
        # we don't want the file around anyway.
        if waffle.switch_is_active('video-encode'):
            os.remove(video_file)
        log.info('Error making thumbnail for %s' % instance.pk, exc_info=True)
        return

    for path in (instance.thumbnail_path, instance.image_path):
        dirs = os.path.dirname(path)
        if not os.path.exists(dirs):
            os.makedirs(dirs)

    shutil.move(thumbnail_file, instance.thumbnail_path)
    if waffle.switch_is_active('video-encode'):
        # Move the file over, removing the temp file.
        shutil.move(video_file, instance.image_path)
    else:
        # We didn't re-encode the file.
        shutil.copyfile(src, instance.image_path)

    # Ensure everyone has read permission on the file.
    os.chmod(instance.image_path, 0o644)
    os.chmod(instance.thumbnail_path, 0o644)
    instance.sizes = {'thumbnail': mkt.ADDON_PREVIEW_SIZES[0],
                      'image': mkt.ADDON_PREVIEW_SIZES[1]}
    instance.save()
    log.info('Completed encoding video: %s' % instance.pk)
    return True
Example #3
    def _create_course_list(self, course_ids):
        info = []
        course_data = {}

        # ccx courses are hidden on the course listing page unless enabled
        if not switch_is_active('enable_ccx_courses'):
            # filter ccx courses
            course_ids = [course_id for course_id in course_ids
                          if not isinstance(CourseKey.from_string(course_id), CCXLocator)]

        if self.course_api_enabled and switch_is_active('display_names_for_course_index'):

            # Get data for all courses in a single API call.
            _api_courses = self.get_courses()

            # Create a lookup table from the data.
            for course in _api_courses:
                course_data[course['id']] = course['name']

        for course_id in course_ids:
            info.append({'key': course_id, 'name': course_data.get(course_id)})

        info.sort(key=lambda course: (course.get('name', '') or course.get('key', '') or '').lower())

        return info
Example #4
    def test_list(self):
        # Precache waffle-switch to not rely on switch caching behavior
        switch_is_active('disco-recommendations')

        with self.assertNumQueries(11):
            # 11 queries:
            # - 1 to fetch the waffle switch 'disco-recommendations'
            # - 1 to fetch the discovery items
            # - 1 to fetch the add-ons (can't be joined with the previous one
            #   because we want to hit the Addon transformer)
            # - 1 to fetch add-ons translations
            # - 1 to fetch add-ons categories
            # - 1 to fetch add-ons current_version
            # - 1 to fetch the versions translations
            # - 1 to fetch the versions applications_versions
            # - 1 to fetch the versions files
            # - 1 to fetch the add-ons authors
            # - 1 to fetch the add-ons personas
            # - 1 to fetch the add-ons previews
            response = self.client.get(self.url, {'lang': 'en-US'})
        assert response.data

        discopane_items = DiscoveryItem.objects.all().filter(
            position__gt=0).order_by('position')
        assert response.data['count'] == len(discopane_items)
        assert response.data['results']

        for i, result in enumerate(response.data['results']):
            assert result['is_recommendation'] is False
            if 'theme_data' in result['addon']:
                self._check_disco_theme(result, discopane_items[i])
            else:
                self._check_disco_addon(result, discopane_items[i])
Example #5
    def test_switch_inactive_all_sites_override(self):
        name = 'myswitch'
        Switch.objects.create(name=name, active=False, site=self.site1)
        self.assertFalse(waffle.switch_is_active(get(), name))

        with self.settings(SITE_ID=2):
            self.assertFalse(waffle.switch_is_active(get(), name))
Example #6
 def test_switch_inactive_from_cache(self):
     """Do not make two queries for an existing inactive switch."""
     switch = Switch.objects.create(name='myswitch', active=False)
     # Get the value once so that it will be put into the cache
     assert not waffle.switch_is_active(switch.name)
     queries = len(connection.queries)
     assert not waffle.switch_is_active(switch.name)
     self.assertEqual(queries, len(connection.queries), 'We should only make one query.')
Example #7
 def test_switch_active_from_cache(self):
     """Do not make two queries for an existing active switch."""
     switch = Switch.objects.create(name="myswitch", active=True)
     # Get the value once so that it will be put into the cache
     assert waffle.switch_is_active(switch.name)
     queries = len(connection.queries)
     assert waffle.switch_is_active(switch.name)
     eq_(queries, len(connection.queries), "We should only make one query.")
Example #8
    def test_switch_site_default(self):
        name = "myswitch"
        switch = Switch.objects.create(name=name, active=True)  # no site given

        self.assertTrue(waffle.switch_is_active(get(), name))

        with self.settings(SITE_ID=2):
            self.assertTrue(waffle.switch_is_active(get(), name))
Example #9
        def _wrapped_view(request, *args, **kwargs):
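            # A leading '!' on switch_name inverts the check: the wrapped view
            # is only served while the switch is inactive.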
            if switch_name.startswith('!'):
                active = not switch_is_active(request, switch_name[1:])
            else:
                active = switch_is_active(request, switch_name)

            if not active:
                raise Http404
            return view(request, *args, **kwargs)
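For context, the wrapper above has the shape of django-waffle's waffle_switch view decorator. A minimal usage sketch (the view and switch names are hypothetical):

from django.http import HttpResponse
from waffle.decorators import waffle_switch


@waffle_switch('new-checkout')       # 404 unless the switch is active
def checkout(request):
    return HttpResponse('new checkout')


@waffle_switch('!new-checkout')      # 404 while the switch IS active
def legacy_checkout(request):
    return HttpResponse('legacy checkout')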
Example #10
    def test_switch_by_site(self):
        """ test that we can get different switch values by site """
        name = 'myswitch'
        Switch.objects.create(name=name, active=True, site=self.site1,
                              all_sites_override=False)
        self.assertTrue(waffle.switch_is_active(get(), name))

        with self.settings(SITE_ID=2):
            self.assertFalse(waffle.switch_is_active(get(), name))
Example #11
 def test_no_query(self):
     """Do not make two queries for a non-existent switch."""
     assert not Switch.objects.filter(name="foo").exists()
     queries = len(connection.queries)
     assert not waffle.switch_is_active("foo")
     assert len(connection.queries) > queries, "We should make one query."
     queries = len(connection.queries)
     assert not waffle.switch_is_active("foo")
     eq_(queries, len(connection.queries), "We should only make one query.")
Example #12
    def dispatch(self, request, *args, **kwargs):
        if self.waffle_switch.startswith('!'):
            active = not switch_is_active(self.waffle_switch[1:])
        else:
            active = switch_is_active(self.waffle_switch)

        if not active:
            raise Http404
        return super(WaffleSwitchMixin, self).dispatch(request, *args, **kwargs)
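A usage sketch for the mixin above, with a hypothetical class-based view; waffle_switch is the attribute that dispatch() reads, and a leading '!' inverts the check.

from django.views.generic import TemplateView


class BetaDashboardView(WaffleSwitchMixin, TemplateView):
    template_name = 'dashboard/beta.html'
    waffle_switch = 'beta-dashboard'  # '!beta-dashboard' would hide it while the switch is on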
Example #13
def content_ratings_edit(request, addon_id, addon):
    if settings.DEBUG:
        messages.debug(request, "DEBUG mode on; you may use IARC id 0 with any code")
    initial = {}
    data = request.POST if request.method == "POST" else None

    if waffle.switch_is_active("iarc-upgrade-v2"):
        form_class = IARCV2ExistingCertificateForm
    else:
        try:
            app_info = addon.iarc_info
            initial["submission_id"] = app_info.submission_id
            initial["security_code"] = app_info.security_code
        except IARCInfo.DoesNotExist:
            pass
        form_class = IARCGetAppInfoForm

    form = form_class(data=data, initial=initial, app=addon)
    if request.method == "POST" and form.is_valid():
        try:
            form.save()
            return redirect(addon.get_dev_url("ratings"))
        except django_forms.ValidationError:
            pass  # Fall through to show the form error.

    # Save some information for _ratings_success_msg.
    if "ratings_edit" not in request.session:
        request.session["ratings_edit"] = {}
    last_rated = addon.last_rated_time()
    request.session["ratings_edit"][str(addon.id)] = {
        "app_status": addon.status,
        "rating_modified": last_rated.isoformat() if last_rated else None,
    }
    request.session.modified = True

    ctx = {
        "addon": addon,
        "app_name": get_iarc_app_title(addon),
        "form": form,
        "company": addon.latest_version.developer_name,
        "now": datetime.now().strftime("%Y-%m-%d %H:%M:%S"),
    }

    if waffle.switch_is_active("iarc-upgrade-v2"):
        try:
            iarc_request = addon.iarc_request
            outdated = datetime.now() - iarc_request.created > timedelta(hours=1)
            if outdated:
                # IARC request outdated. Re-create.
                iarc_request.delete()
                iarc_request = IARCRequest.objects.create(app=addon, uuid=uuid.uuid4())
        except IARCRequest.DoesNotExist:
            # No IARC request exists. Create.
            iarc_request = IARCRequest.objects.create(app=addon, uuid=uuid.uuid4())
        ctx["iarc_request_id"] = unicode(uuid.UUID(iarc_request.uuid))

    return render(request, "developers/apps/ratings/ratings_edit.html", ctx)
Example #14
def manifest(request):

    form = forms.NewWebappForm(request.POST or None, request=request)

    features_form = forms.AppFeaturesForm(request.POST or None)
    features_form_valid = (True if not waffle.switch_is_active('buchets')
                           else features_form.is_valid())

    if (request.method == 'POST' and form.is_valid()
        and features_form_valid):

        with transaction.commit_on_success():

            addon = Addon.from_upload(
                form.cleaned_data['upload'],
                [Platform.objects.get(id=amo.PLATFORM_ALL.id)],
                is_packaged=form.is_packaged())

            # Set the device type.
            for device in form.get_devices():
                addon.addondevicetype_set.get_or_create(
                    device_type=device.id)

            # Set the premium type, only bother if it's not free.
            premium = form.get_paid()
            if premium:
                addon.update(premium_type=premium)

            if addon.has_icon_in_manifest():
                # Fetch the icon, do polling.
                addon.update(icon_type='image/png')
            else:
                # In this case there is no need to do any polling.
                addon.update(icon_type='')

            AddonUser(addon=addon, user=request.amo_user).save()
            # Checking it once. Checking it twice.
            AppSubmissionChecklist.objects.create(addon=addon, terms=True,
                                                  manifest=True)

            # Create feature profile.
            if waffle.switch_is_active('buchets'):
                addon.current_version.features.update(
                    **features_form.cleaned_data)

        # Call task outside of `commit_on_success` to avoid it running before
        # the transaction is committed and not finding the app.
        tasks.fetch_icon.delay(addon)

        return redirect('submit.app.details', addon.app_slug)

    return jingo.render(request, 'submit/manifest.html', {
        'step': 'manifest',
        'features_form': features_form,
        'form': form,
        'DEVICE_LOOKUP': DEVICE_LOOKUP
    })
Example #15
 def test_no_query(self):
     """Do not make two queries for a non-existent switch."""
     assert not Switch.objects.filter(name='foo').exists()
     queries = len(connection.queries)
     assert not waffle.switch_is_active('foo')
     assert len(connection.queries) > queries, 'We should make one query.'
     queries = len(connection.queries)
     assert not waffle.switch_is_active('foo')
     self.assertEqual(queries, len(connection.queries), 'We should only make one query.')
Example #16
def check_xpi_info(xpi_info, addon=None):
    from olympia.addons.models import Addon, DeniedGuid
    guid = xpi_info['guid']
    is_webextension = xpi_info.get('is_webextension', False)

    # If we allow the guid to be omitted we assume that one was generated
    # or existed before and use that one.
    # An example is WebExtensions, which don't require a guid but get one
    # generated once they're uploaded. Now, if you update that WebExtension we
    # just use the original guid.
    if addon and not guid and is_webextension:
        xpi_info['guid'] = guid = addon.guid
    if not guid and not is_webextension:
        raise forms.ValidationError(ugettext('Could not find an add-on ID.'))

    if guid:
        current_user = core.get_user()
        if current_user:
            deleted_guid_clashes = Addon.unfiltered.exclude(
                authors__id=current_user.id).filter(guid=guid)
        else:
            deleted_guid_clashes = Addon.unfiltered.filter(guid=guid)
        guid_too_long = (
            not waffle.switch_is_active('allow-long-addon-guid') and
            len(guid) > 64
        )
        if guid_too_long:
            raise forms.ValidationError(
                ugettext('Add-on ID must be 64 characters or less.'))
        if addon and addon.guid != guid:
            msg = ugettext(
                'The add-on ID in your manifest.json or install.rdf (%s) '
                'does not match the ID of your add-on on AMO (%s)')
            raise forms.ValidationError(msg % (guid, addon.guid))
        if (not addon and
            # Non-deleted add-ons.
            (Addon.objects.filter(guid=guid).exists() or
             # DeniedGuid objects for legacy deletions.
             DeniedGuid.objects.filter(guid=guid).exists() or
             # Deleted add-ons that don't belong to the uploader.
             deleted_guid_clashes.exists())):
            raise forms.ValidationError(ugettext('Duplicate add-on ID found.'))
    if len(xpi_info['version']) > 32:
        raise forms.ValidationError(
            ugettext('Version numbers should have fewer than 32 characters.'))
    if not VERSION_RE.match(xpi_info['version']):
        raise forms.ValidationError(
            ugettext('Version numbers should only contain letters, numbers, '
                     'and these punctuation characters: +*.-_.'))

    if is_webextension and xpi_info.get('is_static_theme', False):
        if not waffle.switch_is_active('allow-static-theme-uploads'):
            raise forms.ValidationError(ugettext(
                'WebExtension theme uploads are currently not supported.'))

    return xpi_info
Example #17
    def test_new_switch(self):
        assert not Switch.objects.filter(name="foo").exists()

        with override_switch("foo", active=True):
            assert waffle.switch_is_active(req(), "foo")

        with override_switch("foo", active=False):
            assert not waffle.switch_is_active(req(), "foo")

        assert not Switch.objects.filter(name="foo").exists()
Example #18
    def test_new_switch(self):
        assert not Switch.objects.filter(name='foo').exists()

        with override_switch('foo', active=True):
            assert waffle.switch_is_active('foo')

        with override_switch('foo', active=False):
            assert not waffle.switch_is_active('foo')

        assert not Switch.objects.filter(name='foo').exists()
Example #19
    def test_switch_existed_and_was_active(self):
        Switch.objects.create(name="foo", active=True)

        with override_switch("foo", active=True):
            assert waffle.switch_is_active(req(), "foo")

        with override_switch("foo", active=False):
            assert not waffle.switch_is_active(req(), "foo")

        # make sure it didn't change 'active' value
        assert Switch.objects.get(name="foo").active
Example #20
    def test_switch_set_active(self):
        # Setting non-existing switch creates it
        switch1 = waffle.switch_set_active('foo', True)
        assert waffle.switch_is_active('foo')

        # Setting existing switch overwrites it
        switch2 = waffle.switch_set_active('foo', False)
        assert not waffle.switch_is_active('foo')

        # This is the same switch
        assert switch1 == switch2
Example #21
    def test_switch_existed_and_was_NOT_active(self):
        Switch.objects.create(name='foo', active=False)

        with override_switch('foo', active=True):
            assert waffle.switch_is_active('foo')

        with override_switch('foo', active=False):
            assert not waffle.switch_is_active('foo')

        # make sure it didn't change 'active' value
        assert not Switch.objects.get(name='foo').active
Example #22
    def test_cache_is_flushed_by_testutils_even_in_transaction(self):
        Switch.objects.create(name='foo', active=True)

        with transaction.atomic():
            with override_switch('foo', active=True):
                assert waffle.switch_is_active('foo')

            with override_switch('foo', active=False):
                assert not waffle.switch_is_active('foo')

        assert waffle.switch_is_active('foo')
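The override_switch helper used above also works as a test-method decorator; a minimal sketch (the test class and switch name are illustrative):

import waffle
from django.test import TestCase
from waffle.testutils import override_switch


class SwitchDecoratorTests(TestCase):
    @override_switch('foo', active=True)
    def test_decorated(self):
        assert waffle.switch_is_active('foo')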
Example #23
 def perform_search_with_senior_editor(
         self, url, data=None, expected_queries_count=3, **headers):
     # Just to cache the waffle switch, to avoid polluting the
     # assertNumQueries() call later
     waffle.switch_is_active('boost-webextensions-in-search')
     # We are expecting 3 SQL queries by default, because we need
     # to load the user and its groups.
     self.client.login_api(
         UserProfile.objects.get(email='*****@*****.**'))
     return self.perform_search(
         url, data=data, expected_queries_count=expected_queries_count,
         **headers)
Example #24
    def test_switch_by_multisite(self):
        name = "myswitch"
        switch1 = Switch.objects.create(name=name, active=True, site=self.site1,
                                        all_sites_override=False)
        switch1.site.add(self.site2)
        switch1.site.add(self.site3)

        self.assertTrue(waffle.switch_is_active(get(), name))
        with self.settings(SITE_ID=2):
            self.assertTrue(waffle.switch_is_active(get(), name))
        with self.settings(SITE_ID=3):
            self.assertTrue(waffle.switch_is_active(get(), name))
        with self.settings(SITE_ID=4):
            self.assertFalse(waffle.switch_is_active(get(), name))
Example #25
def queue_tabnav(context):
    """
    Return a list of tab navigation tuples for the queue pages.

    Each tuple contains three elements: (named_url, tab_code, tab_text)
    """
    request = context['request']
    counts = context['queue_counts']
    apps_reviewing = AppsReviewing(request).get_apps()

    # Apps.
    if acl.action_allowed(request, 'Apps', 'Review'):
        rv = [
            ('reviewers.apps.queue_pending', 'pending',
             _('Apps ({0})', counts['pending']).format(counts['pending'])),

            ('reviewers.apps.queue_rereview', 'rereview',
             _('Re-reviews ({0})', counts['rereview']).format(
             counts['rereview'])),

            ('reviewers.apps.queue_updates', 'updates',
             _('Updates ({0})', counts['updates']).format(counts['updates'])),
        ]
        if acl.action_allowed(request, 'Apps', 'ReviewEscalated'):
            rv.append(('reviewers.apps.queue_escalated', 'escalated',
                       _('Escalations ({0})', counts['escalated']).format(
                       counts['escalated'])))
        rv.extend([
            ('reviewers.apps.queue_moderated', 'moderated',
             _('Moderated Reviews ({0})', counts['moderated'])
             .format(counts['moderated'])),

            ('reviewers.apps.apps_reviewing', 'reviewing',
             _('Reviewing ({0})').format(len(apps_reviewing))),
        ])
    else:
        rv = []

    # Themes.
    if (acl.action_allowed(request, 'Personas', 'Review') and
        waffle.switch_is_active('mkt-themes')):
        rv.append(('reviewers.themes.list', 'themes',
                  _('Themes ({0})').format(counts['themes']),))

    if waffle.switch_is_active('buchets') and 'pro' in request.GET:
        device_srch = device_queue_search(request)
        rv.append(('reviewers.apps.queue_device', 'device',
                  _('Device ({0})').format(device_srch.count()),))

    return rv
Example #26
        def _wrapped_view(request, *args, **kwargs):
            if switch_name.startswith('!'):
                active = not switch_is_active(switch_name[1:])
            else:
                active = switch_is_active(switch_name)

            if not active:
                response_to_redirect_to = get_response_to_redirect(redirect_to)
                if response_to_redirect_to:
                    return response_to_redirect_to
                else:
                    raise Http404

            return view(request, *args, **kwargs)
Example #27
    def test_read_from_write_db(self):
        switch = Switch.objects.create(name='switch', active=True)

        # By default, switch_is_active should hit whatever is configured as the
        # read DB (so values will be stale if replication is lagged).
        assert not waffle.switch_is_active(switch.name)

        with override_settings(WAFFLE_READ_FROM_WRITE_DB=True):
            # Save the switch again to flush the cache.
            switch.save()

            # The next read should now be directed to the write DB, ensuring
            # the cache and DB are in sync.
            assert waffle.switch_is_active(switch.name)
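The behavior exercised above is controlled by a django-waffle setting; a one-line settings.py sketch (the non-default value is shown):

WAFFLE_READ_FROM_WRITE_DB = True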
Example #28
def _collections(request):
    """Handle the request for collections."""

    # Sorting by relevance isn't an option. Instead the default is `weekly`.
    initial = dict(sort='weekly')
    # Update with GET variables.
    initial.update(request.GET.items())
    # Ignore appver/platform and set default number of collections per page.
    initial.update(appver=None, platform=None, pp=DEFAULT_NUM_COLLECTIONS)

    form = SecondarySearchForm(initial)
    form.is_valid()

    if waffle.switch_is_active('replace-sphinx'):
        qs = Collection.search().filter(listed=True, app=request.APP.id)
        filters = ['sort']
        mapping = {'weekly': '-weekly_subscribers',
                   'monthly': '-monthly_subscribers',
                   'all': '-subscribers',
                   'rating': '-rating',
                   'created': '-created',
                   'name': 'name_sort',
                   'updated': '-modified'}
        results = _filter_search(request, qs, form.cleaned_data, filters,
                                 sorting=mapping,
                                 sorting_default='-weekly_subscribers',
                                 types=amo.COLLECTION_SEARCH_CHOICES)

    query = form.cleaned_data.get('q', '')

    search_opts = {}
    search_opts['limit'] = form.cleaned_data.get('pp', DEFAULT_NUM_COLLECTIONS)
    page = form.cleaned_data.get('page') or 1
    search_opts['offset'] = (page - 1) * search_opts['limit']
    search_opts['sort'] = form.cleaned_data.get('sort')

    if not waffle.switch_is_active('replace-sphinx'):
        # The new hotness calls this `created`. Sphinx still calls it `newest`.
        if search_opts['sort'] == 'created':
            search_opts['sort'] = 'newest'
        try:
            results = CollectionsClient().query(query, **search_opts)
        except SearchError:
            return jingo.render(request, 'search/down.html', {}, status=503)

    pager = amo.utils.paginate(request, results, per_page=search_opts['limit'])
    c = dict(pager=pager, form=form, query=query, opts=search_opts,
             filter=bandwagon.views.get_filter(request),
             search_placeholder='collections')
    return jingo.render(request, 'search/collections.html', c)
Example #29
def _personas(request):
    """Handle the request for persona searches."""

    initial = dict(request.GET.items())

    if waffle.switch_is_active('replace-sphinx'):
        # Ignore these filters since they return the same results for Firefox
        # as for Thunderbird, etc.
        initial.update(appver=None, platform=None)

        form = ESSearchForm(initial, type=amo.ADDON_PERSONA)
        form.is_valid()

        qs = Addon.search().filter(status__in=amo.REVIEWED_STATUSES,
                                   is_disabled=False)
        filters = ['sort']
        mapping = {'downloads': '-weekly_downloads',
                   'users': '-average_daily_users',
                   'rating': '-bayesian_rating',
                   'created': '-created',
                   'name': 'name_sort',
                   'updated': '-last_updated',
                   'hotness': '-hotness'}
        results = _filter_search(request, qs, form.cleaned_data, filters,
                                 sorting=mapping, types=[amo.ADDON_PERSONA])
    else:
        # Set the default number of personas per page.
        initial.update(pp=DEFAULT_NUM_PERSONAS)
        form = SecondarySearchForm(initial)
        form.is_valid()

    query = form.cleaned_data.get('q', '')

    search_opts = {}
    search_opts['limit'] = form.cleaned_data.get('pp', DEFAULT_NUM_PERSONAS)
    page = form.cleaned_data.get('page') or 1
    search_opts['offset'] = (page - 1) * search_opts['limit']

    if not waffle.switch_is_active('replace-sphinx'):
        try:
            results = PersonasClient().query(query, **search_opts)
        except SearchError:
            return jingo.render(request, 'search/down.html', {}, status=503)

    pager = amo.utils.paginate(request, results, per_page=search_opts['limit'])
    categories, filter, _, _ = browse.views.personas_listing(request)
    c = dict(pager=pager, form=form, categories=categories, query=query,
             filter=filter, search_placeholder='personas')
    return jingo.render(request, 'search/personas.html', c)
Example #30
def email_daily_ratings():
    """
    Email yesterday's ratings (right after the day has passed).
    Sends one email per app containing all of that app's reviews for the day.
    """
    if not waffle.switch_is_active('ratings'):
        return

    dt = datetime.datetime.today() - datetime.timedelta(1)
    yesterday = datetime.datetime(dt.year, dt.month, dt.day, 0, 0, 0)
    today = yesterday + datetime.timedelta(1)
    pretty_date = '%04d-%02d-%02d' % (dt.year, dt.month, dt.day)

    yesterday_reviews = Review.objects.filter(created__gte=yesterday,
                                              created__lt=today,
                                              addon__type=amo.ADDON_WEBAPP)

    # For each app in yesterday's set of reviews, gather reviews and email out.
    apps = set(review.addon for review in yesterday_reviews)
    for app in apps:
        # Email all reviews in one email for current app in loop.
        author_emails = app.authors.values_list('email', flat=True)
        subject = 'Firefox Marketplace reviews for %s on %s' % (app.name,
                                                                pretty_date)

        context = {'reviews': (yesterday_reviews.filter(addon=app).
                               order_by('-created')),
                   'base_url': settings.SITE_URL,
                   'pretty_date': pretty_date}

        send_mail_jinja(subject, 'ratings/emails/daily_digest.html',
                        context, recipient_list=author_emails,
                        perm_setting='app_new_review')
Example #31
    def get(self, request):  # pylint: disable=too-many-statements
        """ Calculate basket totals given a list of sku's

        Create a temporary basket add the sku's and apply an optional voucher code.
        Then calculate the total price less discounts. If a voucher code is not
        provided apply a voucher in the Enterprise entitlements available
        to the user.

        Query Params:
            sku (string): A list of sku(s) to calculate
            code (string): Optional voucher code to apply to the basket.
            username (string): Optional username of a user for which to calculate the basket.

        Returns:
            JSON: {
                    'total_incl_tax_excl_discounts': basket.total_incl_tax_excl_discounts,
                    'total_incl_tax': basket.total_incl_tax,
                    'currency': basket.currency
                }
        """
        RequestCache.set(TEMPORARY_BASKET_CACHE_KEY, True)  # TODO: LEARNER 5463

        partner = get_partner_for_site(request)
        skus = request.GET.getlist('sku')
        if not skus:
            return HttpResponseBadRequest(_('No SKUs provided.'))
        skus.sort()

        code = request.GET.get('code', None)
        try:
            voucher = Voucher.objects.get(code=code) if code else None
        except Voucher.DoesNotExist:
            voucher = None

        products = Product.objects.filter(stockrecords__partner=partner,
                                          stockrecords__partner_sku__in=skus)
        if not products:
            return HttpResponseBadRequest(
                _('Products with SKU(s) [{skus}] do not exist.').format(
                    skus=', '.join(skus)))

        # If there is only one product apply an Enterprise entitlement voucher
        if not voucher and len(products) == 1:
            voucher = get_entitlement_voucher(request, products[0])

        basket_owner = request.user

        if waffle.switch_is_active(
                "force_anonymous_user_response_for_basket_calculate"):
            # Use the anonymous user program price for all users
            requested_username = ''
            is_anonymous = True
        else:
            requested_username = request.GET.get('username', default='')
            is_anonymous = request.GET.get('is_anonymous',
                                           'false').lower() == 'true'

        use_default_basket = is_anonymous

        # validate query parameters
        if requested_username and is_anonymous:
            return HttpResponseBadRequest(
                _('Provide username or is_anonymous query param, but not both')
            )
        elif not requested_username and not is_anonymous:
            logger.warning(
                "Request to Basket Calculate must supply either username or is_anonymous query"
                " param. Requesting user=%s. Future versions of this API will treat this "
                "WARNING as an ERROR and raise an exception.",
                basket_owner.username)
            requested_username = request.user.username

        # If a username is passed in, validate that the user has staff access or is the same user.
        if requested_username:
            if basket_owner.username.lower() == requested_username.lower():
                pass
            elif basket_owner.is_staff:
                try:
                    basket_owner = User.objects.get(
                        username=requested_username)
                except User.DoesNotExist:
                    # This case represents a user who is logged in to marketing, but
                    # doesn't yet have an account in ecommerce. These users have
                    # never purchased before.
                    use_default_basket = True
            else:
                return HttpResponseForbidden('Unauthorized user credentials')

        if basket_owner.username == self.MARKETING_USER and not use_default_basket:
            # For legacy requests that predate is_anonymous parameter, we will calculate
            # an anonymous basket if the calculated user is the marketing user.
            # TODO: LEARNER-5057: Remove this special case for the marketing user
            # once logs show no more requests with no parameters (see above).
            use_default_basket = True

        if use_default_basket:
            basket_owner = None

        cache_key = None
        if use_default_basket:
            # For an anonymous user we can directly get the cached price, because
            # there can't be any enrollments or entitlements.
            cache_key = get_cache_key(site_comain=request.site,
                                      resource_name='calculate',
                                      skus=skus)
            cached_response = TieredCache.get_cached_response(cache_key)
            if cached_response.is_hit:
                return Response(cached_response.value)

        # There are too many open questions around dropping the atomic transaction for a user's basket,
        # including how user basket merges was coded.  For now, only allow disabling the atomic
        # transaction if we are also forcing the anonymous basket response for all users.
        if waffle.flag_is_active(request, "disable_calculate_temporary_basket_atomic_transaction")\
                and waffle.switch_is_active("force_anonymous_user_response_for_basket_calculate"):
            response = self._calculate_temporary_basket(
                basket_owner, request, products, voucher, skus, code)
        else:
            response = self._calculate_temporary_basket_atomic(
                basket_owner, request, products, voucher, skus, code)

        if response and use_default_basket:
            TieredCache.set_all_tiers(
                cache_key, response,
                settings.ANONYMOUS_BASKET_CALCULATE_CACHE_TIMEOUT)

        return Response(response)
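A hedged client-side sketch of calling the endpoint documented above; the host, URL path and auth scheme are assumptions, while the sku/code query parameters come from the docstring.

import requests

resp = requests.get(
    'https://ecommerce.example.com/api/v2/baskets/calculate/',
    params={'sku': ['SKU-123', 'SKU-456'], 'code': 'SUMMER20'},
    headers={'Authorization': 'JWT <access-token>'},
)
print(resp.json())
# e.g. {'total_incl_tax_excl_discounts': 100.0, 'total_incl_tax': 80.0, 'currency': 'USD'}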
Example #32
def payments(request, addon_id, addon, webapp=False):
    premium_form = forms_payments.PremiumForm(request.POST or None,
                                              request=request,
                                              addon=addon,
                                              user=request.amo_user)

    region_form = forms.RegionForm(request.POST or None,
                                   product=addon,
                                   request=request)

    upsell_form = forms_payments.UpsellForm(request.POST or None,
                                            addon=addon,
                                            user=request.amo_user)

    bango_account_list_form = forms_payments.BangoAccountListForm(
        request.POST or None, addon=addon, user=request.amo_user)

    if request.method == 'POST':

        success = all(
            form.is_valid() for form in
            [premium_form, region_form, upsell_form, bango_account_list_form])

        if success:
            region_form.save()

            try:
                premium_form.save()
            except client.Error as err:
                success = False
                log.error('Error setting payment information (%s)' % err)
                messages.error(
                    request,
                    _(u'We encountered a problem connecting to the '
                      u'payment server.'))
                raise  # We want to see these exceptions!

            is_free_inapp = addon.premium_type == amo.ADDON_FREE_INAPP
            is_now_paid = (addon.premium_type in amo.ADDON_PREMIUMS
                           or is_free_inapp)

            # If we haven't changed to a free app, check the upsell.
            if is_now_paid and success:
                try:
                    if not is_free_inapp:
                        upsell_form.save()
                    bango_account_list_form.save()
                except client.Error as err:
                    log.error('Error saving payment information (%s)' % err)
                    messages.error(
                        request,
                        _(u'We encountered a problem connecting to '
                          u'the payment server.'))
                    success = False
                    raise  # We want to see all the solitude errors now.

        # If everything happened successfully, give the user a pat on the back.
        if success:
            messages.success(request, _('Changes successfully saved.'))
            return redirect(addon.get_dev_url('payments'))

    # TODO: This needs to be updated as more platforms support payments.
    cannot_be_paid = (addon.premium_type == amo.ADDON_FREE and any(
        premium_form.device_data['free-%s' % x] == y
        for x, y in [('android-mobile',
                      True), ('android-tablet',
                              True), ('desktop', True), ('firefoxos', False)]))

    try:
        tier_zero = Price.objects.get(price='0.00', active=True)
        tier_zero_id = tier_zero.pk
    except Price.DoesNotExist:
        tier_zero = None
        tier_zero_id = ''

    # Get the regions based on tier zero. This should be all the
    # regions with payments enabled.
    paid_region_ids_by_slug = []
    if tier_zero:
        paid_region_ids_by_slug = tier_zero.region_ids_by_slug()

    return jingo.render(
        request, 'developers/payments/premium.html', {
            'addon': addon,
            'webapp': webapp,
            'premium': addon.premium,
            'form': premium_form,
            'upsell_form': upsell_form,
            'tier_zero_id': tier_zero_id,
            'region_form': region_form,
            'DEVICE_LOOKUP': DEVICE_LOOKUP,
            'is_paid': (addon.premium_type in amo.ADDON_PREMIUMS or
                        addon.premium_type == amo.ADDON_FREE_INAPP),
            'no_paid': cannot_be_paid,
            'is_incomplete': addon.status == amo.STATUS_NULL,
            'is_packaged': addon.is_packaged,
            # Bango values
            'bango_account_form': forms_payments.BangoPaymentAccountForm(),
            'bango_account_list_form': bango_account_list_form,
            # Waffles
            'payments_enabled': (
                waffle.flag_is_active(request, 'allow-b2g-paid-submission') and
                not waffle.switch_is_active('disabled-payments')),
            'api_pricelist_url': reverse('api_dispatch_list',
                                         kwargs={'resource_name': 'prices',
                                                 'api_name': 'webpay'}),
            'payment_methods': {
                PAYMENT_METHOD_ALL: _('All'),
                PAYMENT_METHOD_CARD: _('Credit card'),
                PAYMENT_METHOD_OPERATOR: _('Carrier'),
            },
            'all_paid_region_ids_by_slug': paid_region_ids_by_slug,
        })
Example #33
def sign_file(file_obj):
    """Sign a File if necessary.

    If it's not necessary (file exists but it's a mozilla signed one) then
    return the file directly.

    If there's no endpoint (signing is not enabled), the file isn't reviewed
    yet, or there was an error while signing, raise an exception - it
    shouldn't happen.

    Otherwise proceed with signing and return the signed file.
    """
    from olympia.git.utils import create_git_extraction_entry

    if not settings.ENABLE_ADDON_SIGNING:
        raise SigningError(
            f'Not signing file {file_obj.pk}: no active endpoint')

    # No file? No signature.
    if not os.path.exists(file_obj.current_file_path):
        raise SigningError(
            f"File {file_obj.current_file_path} doesn't exist on disk")

    # Don't sign Mozilla signed extensions (they're already signed).
    if file_obj.is_mozilla_signed_extension:
        # Don't raise an exception here, just log and return file_obj even
        # though we didn't sign, it's not an error - we just don't need to do
        # anything in this case.
        log.info('Not signing file {}: mozilla signed extension is already '
                 'signed'.format(file_obj.pk))
        return file_obj

    # We only sign files that are compatible with Firefox.
    if not supports_firefox(file_obj):
        raise SigningError(
            'Not signing version {}: not for a Firefox version we support'.
            format(file_obj.version.pk))

    # Get the path before call_signing modifies it... We'll delete it after if
    # signing was successful and we ended up changing it.
    old_path = file_obj.current_file_path

    # Sign the file. If there's any exception, we skip the rest.
    cert_serial_num = str(call_signing(file_obj))

    size = storage.size(file_obj.current_file_path)

    # Save the certificate serial number for revocation if needed, change the
    # filename to use a .xpi extension (cachebusting anything that depends on
    # the filename with the old .zip extension) and re-hash the file now that
    # it's been signed.
    file_obj.update(
        cert_serial_num=cert_serial_num,
        hash=file_obj.generate_hash(),
        size=size,
        # Re-specify filename and is_signed, which we already updated on the
        # instance without saving, otherwise those wouldn't get updated.
        filename=file_obj.filename,
        is_signed=file_obj.is_signed,
    )
    log.info(f'Signing complete for file {file_obj.pk}')

    if waffle.switch_is_active('enable-uploads-commit-to-git-storage'):
        # Schedule this version for git extraction.
        transaction.on_commit(
            lambda: create_git_extraction_entry(version=file_obj.version))

    # Remove old unsigned path if necessary.
    if old_path != file_obj.current_file_path:
        storage.delete(old_path)

    return file_obj
Example #34
 def __init__(self, *args, **kwargs):
     super(AuthorForm, self).__init__(*args, **kwargs)
     self.fields['role'].choices = ((c, s) for c, s in amo.AUTHOR_CHOICES
                                    if c != amo.AUTHOR_ROLE_SUPPORT or
                                    waffle.switch_is_active('allow-refund'))
Example #35
def call_mad_api(all_results, upload_pk):
    """
    Call the machine learning API (mad-server) for a given FileUpload.

    This task is the callback of the Celery chord in the validation chain. It
    receives all the results returned by all the tasks in this chord.

    - `all_results` are the results returned by all the tasks in the chord.
    - `upload_pk` is the FileUpload ID.
    """
    # This task is the callback of a Celery chord and receives all the results
    # returned by all the tasks in this chord. The first task registered in the
    # chord is `forward_linter_results()`:
    results = all_results[0]

    # In case of a validation (linter) error, we do want to skip this task.
    # This is similar to the behavior of all other tasks decorated with
    # `@validation_task` but, because this task is the callback of a Celery
    # chord, we cannot use this decorator.
    if results['errors'] > 0:
        return results

    if not waffle.switch_is_active('enable-mad'):
        log.debug('Skipping scanner "mad" task, switch is off')
        return results

    log.info('Starting scanner "mad" task for FileUpload %s.', upload_pk)

    if not results['metadata']['is_webextension']:
        log.info(
            'Not calling scanner "mad" for FileUpload %s, it is not '
            'a webextension.',
            upload_pk,
        )
        return results

    try:
        # TODO: retrieve all scanner results and pass each result to the API.
        customs_results = ScannerResult.objects.get(
            upload_id=upload_pk, scanner=CUSTOMS
        )

        with statsd.timer('devhub.mad'):
            json_payload = {'scanners': {'customs': customs_results.results}}
            response = requests.post(
                url=settings.MAD_API_URL,
                json=json_payload,
                timeout=settings.MAD_API_TIMEOUT,
            )

        try:
            data = response.json()
        except ValueError:
            # Log the response body when JSON decoding has failed.
            raise ValueError(response.text)

        if response.status_code != 200:
            raise ValueError(data)

        default_score = -1
        ScannerResult.objects.create(
            upload_id=upload_pk,
            scanner=MAD,
            results=data,
            score=data.get('ensemble', default_score),
        )

        # Update the individual scanner results scores.
        customs_score = (
            data.get('scanners', {})
            .get('customs', {})
            .get('score', default_score)
        )
        customs_results.update(score=customs_score)

        statsd.incr('devhub.mad.success')
        log.info('Ending scanner "mad" task for FileUpload %s.', upload_pk)
    except Exception:
        statsd.incr('devhub.mad.failure')
        # We log the exception but we do not raise to avoid perturbing the
        # submission flow.
        log.exception(
            'Error in scanner "mad" task for FileUpload %s.', upload_pk
        )

    return results
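A sketch of how a callback like call_mad_api is wired into a Celery chord; forward_linter_results is named in the docstring, while run_customs is a hypothetical stand-in for the other scanner tasks.

from celery import chord


def run_validation_chord(upload_pk):
    scanners = [
        forward_linter_results.s(upload_pk),  # registered first, so all_results[0]
        run_customs.s(upload_pk),             # hypothetical scanner task
    ]
    # Every task in `scanners` runs, then the chord calls the callback with
    # the list of their results prepended, i.e. call_mad_api(all_results, upload_pk).
    return chord(scanners)(call_mad_api.s(upload_pk))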
Example #36
def global_settings(request):
    """
    Store standard AMO-wide information used in global headers, such as
    account links and settings.
    """
    account_links = []
    tools_links = []
    context = {}

    tools_title = ugettext('Tools')
    is_reviewer = False

    # We're using `getattr` here because `request.user` can be missing,
    # e.g. in case of a 500 server error.
    if getattr(request, 'user', AnonymousUser()).is_authenticated:
        is_reviewer = acl.is_user_any_kind_of_reviewer(request.user)

        account_links.append({'text': ugettext('My Profile'),
                              'href': request.user.get_url_path()})

        account_links.append({'text': ugettext('Account Settings'),
                              'href': reverse('users.edit')})
        account_links.append({
            'text': ugettext('My Collections'),
            'href': reverse('collections.user', args=[request.user.id])})

        if request.user.favorite_addons:
            account_links.append(
                {'text': ugettext('My Favorites'),
                 'href': reverse('collections.detail',
                                 args=[request.user.id, 'favorites'])})

        account_links.append({
            'text': ugettext('Log out'),
            'href': reverse('users.logout') + '?to=' + urlquote(request.path),
        })

        if request.user.is_developer:
            tools_links.append({'text': ugettext('Manage My Submissions'),
                                'href': reverse('devhub.addons')})
        tools_links.append(
            {'text': ugettext('Submit a New Add-on'),
             'href': reverse('devhub.submit.agreement')})
        no_more_lwt = waffle.switch_is_active('disable-lwt-uploads')
        tools_links.append(
            {'text': ugettext('Submit a New Theme'),
             'href': reverse('devhub.submit.agreement' if no_more_lwt
                             else 'devhub.themes.submit')})
        tools_links.append(
            {'text': ugettext('Developer Hub'),
             'href': reverse('devhub.index')})
        tools_links.append(
            {'text': ugettext('Manage API Keys'),
             'href': reverse('devhub.api_key')}
        )

        if is_reviewer:
            tools_links.append({'text': ugettext('Reviewer Tools'),
                                'href': reverse('reviewers.dashboard')})
        if acl.action_allowed(request, amo.permissions.ANY_ADMIN):
            tools_links.append({'text': ugettext('Admin Tools'),
                                'href': reverse('zadmin.index')})

        context['user'] = request.user
    else:
        context['user'] = AnonymousUser()

    context.update({'account_links': account_links,
                    'settings': settings,
                    'amo': amo,
                    'tools_links': tools_links,
                    'tools_title': tools_title,
                    'is_reviewer': is_reviewer})
    return context
Example #37
 def test_undecorated_method_is_set_properly_for_switch(self):
     self.assertFalse(waffle.switch_is_active('foo'))
Example #38
def send_mail(to_addr,
              mail,
              mimetype='html',
              from_addr=None,
              mailer=None,
              celery=True,
              username=None,
              password=None,
              callback=None,
              attachment_name=None,
              attachment_content=None,
              **context):
    """Send an email from the OSF.
    Example: ::

        from website import mails

        mails.send_mail('*****@*****.**', mails.TEST, name="Foo")

    :param str to_addr: The recipient's email address
    :param Mail mail: The mail object
    :param str mimetype: Either 'plain' or 'html'
    :param function callback: celery task to execute after send_mail completes
    :param **context: Context vars for the message template

    .. note:
         Uses celery if available
    """
    if waffle.switch_is_active(
            features.DISABLE_ENGAGEMENT_EMAILS) and mail.engagement:
        return False

    from_addr = from_addr or settings.FROM_EMAIL
    mailer = mailer or tasks.send_email
    subject = mail.subject(**context)
    message = mail.html(**context)
    # Don't use ttls and login in DEBUG_MODE
    ttls = login = not settings.DEBUG_MODE
    logger.debug('Sending email...')
    logger.debug(
        u'To: {to_addr}\nFrom: {from_addr}\nSubject: {subject}\nMessage: {message}'
        .format(**locals()))

    kwargs = dict(
        from_addr=from_addr,
        to_addr=to_addr,
        subject=subject,
        message=message,
        mimetype=mimetype,
        ttls=ttls,
        login=login,
        username=username,
        password=password,
        categories=mail.categories,
        attachment_name=attachment_name,
        attachment_content=attachment_content,
    )

    logger.debug('Preparing to send...')
    if settings.USE_EMAIL:
        if settings.USE_CELERY and celery:
            logger.debug('Sending via celery...')
            return mailer.apply_async(kwargs=kwargs, link=callback)
        else:
            logger.debug('Sending without celery')
            ret = mailer(**kwargs)
            if callback:
                callback()

            return ret
Example #39
 def test_disabled():
     assert not waffle.switch_is_active('foo')
Example #40
def fetch_bugs(components=COMPONENTS, days=None):
    """Fetch all bugs from Bugzilla.

    Loop over components and fetch bugs updated during the last `days` days.
    Link Bugzilla users with users on this website, when possible.

    # TODO: This can trigger a DoesNotExist error because the task was picked
    # up by the worker before the transaction was complete. Needs fixing after
    # the upgrade to a Django version > 1.8
    """
    now = timezone.now()
    if not days:
        changed_date = get_last_updated_date()
    else:
        changed_date = now - timedelta(int(days))

    for component in components:
        offset = 0
        url = URL.format(api_key=settings.REMOZILLA_API_KEY,
                         component=quote(component),
                         fields=','.join(BUGZILLA_FIELDS),
                         timestamp=changed_date,
                         offset=offset,
                         limit=LIMIT)

        while True:
            bugs = requests.get(url).json()
            error = bugs.get('error')

            # Check the server response for errors
            if error:
                raise ValueError('Invalid response from server, {0}.'.format(
                    bugs['message']))

            remo_bugs = bugs.get('bugs', [])
            if not remo_bugs:
                break

            for bdata in remo_bugs:
                # Get comments for current bug
                comment_url = COMMENT_URL.format(
                    id=bdata['id'], api_key=settings.REMOZILLA_API_KEY)
                comments = requests.get(comment_url).json()
                error = comments.get('error')

                if error:
                    raise ValueError(
                        'Invalid response from server, {0}.'.format(
                            comments['message']))

                bug, created = Bug.objects.get_or_create(bug_id=bdata['id'])

                bug.summary = bdata.get('summary', '')
                creator_email = bdata['creator']
                bug.creator = get_object_or_none(User, email=creator_email)
                bug.bug_creation_time = parse_bugzilla_time(
                    bdata['creation_time'])
                bug.component = bdata['component']
                bug.whiteboard = bdata.get('whiteboard', '')

                bug.cc.clear()
                for email in bdata.get('cc', []):
                    cc_user = get_object_or_none(User, email=email)
                    if cc_user:
                        bug.cc.add(cc_user)

                bug.assigned_to = get_object_or_none(
                    User, email=bdata['assigned_to'])
                bug.status = bdata['status']
                bug.resolution = bdata.get('resolution', '')
                bug.bug_last_change_time = parse_bugzilla_time(
                    bdata.get('last_change_time'))

                automated_voting_trigger = 0
                bug.budget_needinfo.clear()
                bug.council_member_assigned = False
                bug.pending_mentor_validation = False
                for flag in bdata.get('flags', []):
                    if flag['status'] == '?' and flag['name'] == BUG_APPROVAL:
                        automated_voting_trigger += 1
                        if BUG_WHITEBOARD in bug.whiteboard:
                            bug.council_member_assigned = True
                    if ((flag['status'] == '?' and flag['name'] == 'needinfo'
                         and 'requestee' in flag and flag['requestee']
                         == (settings.REPS_REVIEW_ALIAS))):
                        automated_voting_trigger += 1
                    if flag['status'] == '?' and flag['name'] == BUG_REVIEW:
                        bug.pending_mentor_validation = True
                    if (flag['status'] == '?' and flag['name'] == 'needinfo'
                            and 'requestee' in flag):
                        email = flag['requestee']
                        user = get_object_or_none(User, email=email)
                        if user:
                            bug.budget_needinfo.add(user)

                if automated_voting_trigger == 2 and waffle.switch_is_active(
                        'automated_polls'):
                    bug.council_vote_requested = True

                unicode_id = str(bdata['id'])
                bug_comments = comments['bugs'][unicode_id]['comments']
                if bug_comments and bug_comments[0].get('text', ''):
                    # Enforce unicode encoding.
                    bug.first_comment = bug_comments[0]['text']

                bug.save()

            offset += LIMIT
            url = urlparams(url, offset=offset)

    set_last_updated_date(now)
Example #41
def call_mad_api(all_results, upload_pk):
    """
    Call the machine learning API (mad-server) for a given FileUpload.

    This task is the callback of the Celery chord in the validation chain. It
    receives all the results returned by all the tasks in this chord.

    - `all_results` are the results returned by all the tasks in the chord.
    - `upload_pk` is the FileUpload ID.
    """
    # In case of a validation error (linter or scanner), we do want to skip
    # this task. This is similar to the behavior of all other tasks decorated
    # with `@validation_task` but, because this task is the callback of a
    # Celery chord, we cannot use this decorator.
    for results in all_results:
        if results['errors'] > 0:
            return results

    # The first task registered in the chord is `forward_linter_results()`:
    results = all_results[0]

    if not waffle.switch_is_active('enable-mad'):
        log.info('Skipping scanner "mad" task, switch is off')
        return results

    request_id = uuid.uuid4().hex
    log.info('Starting scanner "mad" task for FileUpload %s, request_id=%s.',
             upload_pk, request_id)

    if not results['metadata']['is_webextension']:
        log.info(
            'Not calling scanner "mad" for FileUpload %s, it is not '
            'a webextension.',
            upload_pk,
        )
        return results

    try:
        # TODO: retrieve all scanner results and pass each result to the API.
        customs_results = ScannerResult.objects.get(
            upload_id=upload_pk, scanner=CUSTOMS
        )

        scanMapKeys = customs_results.results.get('scanMap', {}).keys()
        if len(scanMapKeys) < 2:
            log.info(
                'Not calling scanner "mad" for FileUpload %s, scanMap is too '
                'small.',
                upload_pk
            )
            statsd.incr('devhub.mad.skip')
            return results

        with statsd.timer('devhub.mad'):
            with requests.Session() as http:
                adapter = HTTPAdapter(
                    max_retries=Retry(
                        total=1,
                        method_whitelist=['POST'],
                        status_forcelist=[500, 502, 503, 504],
                    )
                )
                http.mount("http://", adapter)
                http.mount("https://", adapter)

                json_payload = {
                    'scanners': {'customs': customs_results.results}
                }
                response = http.post(
                    url=settings.MAD_API_URL,
                    json=json_payload,
                    timeout=settings.MAD_API_TIMEOUT,
                    headers={'x-request-id': request_id},
                )

        try:
            data = response.json()
        except ValueError:
            # Log the response body when JSON decoding has failed.
            raise ValueError(response.text)

        if response.status_code != 200:
            raise ValueError(data)

        default_score = -1
        ScannerResult.objects.create(
            upload_id=upload_pk,
            scanner=MAD,
            results=data,
            score=data.get('ensemble', default_score),
        )

        # Update the individual scanner results with some info from MAD.
        customs_data = data.get('scanners', {}).get('customs', {})
        customs_score = customs_data.get('score', default_score)
        customs_model_version = customs_data.get('model_version')
        customs_results.update(
            score=customs_score, model_version=customs_model_version
        )

        statsd.incr('devhub.mad.success')
        log.info('Ending scanner "mad" task for FileUpload %s.', upload_pk)
    except Exception:
        statsd.incr('devhub.mad.failure')
        # We log the exception but we do not raise to avoid perturbing the
        # submission flow.
        log.exception(
            'Error in scanner "mad" task for FileUpload %s.', upload_pk
        )

    return results
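
The HTTP block above combines `requests` with urllib3's `Retry`; here is the same pattern in isolation, as a minimal sketch (the helper name is illustrative, and newer urllib3 releases rename `method_whitelist` to `allowed_methods`):

import requests
from requests.adapters import HTTPAdapter
from urllib3.util.retry import Retry


def make_retrying_session(total_retries=1):
    """Build a Session that retries POSTs on transient 5xx responses."""
    retry = Retry(
        total=total_retries,
        method_whitelist=['POST'],  # `allowed_methods` on newer urllib3
        status_forcelist=[500, 502, 503, 504],
    )
    adapter = HTTPAdapter(max_retries=retry)
    session = requests.Session()
    session.mount('http://', adapter)
    session.mount('https://', adapter)
    return session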
Exemple #42
0
def get_series_line(model,
                    group,
                    primary_field=None,
                    extra_fields=None,
                    extra_values=None,
                    **filters):
    """
    Get a generator of dicts for the stats model given by the filters, made
    to fit into Highchart's datetime line graph.

    primary_field takes a field name that can be referenced by the key 'count'
    extra_fields takes a list of fields that can be found in the index
    on top of date and count and can be seen in the output
    extra_values is a list of constant values added to each line
    """
    if not extra_fields:
        extra_fields = []

    extra_values = extra_values or {}

    if waffle.switch_is_active('monolith-stats'):
        keys = {
            Installed: 'app_installs',
            UpdateCount: 'updatecount_XXX',
            Contribution: 'contribution_XXX',
            InappPayment: 'inapppayment_XXX'
        }

        # Getting data from the monolith server.
        client = get_monolith_client()

        field = keys[model]
        start, end = filters['date__range']

        if group == 'date':
            group = 'day'

        try:
            for result in client(field,
                                 start,
                                 end,
                                 interval=group,
                                 addon_id=filters['addon']):
                res = {'count': result['count']}
                for extra_field in extra_fields:
                    res[extra_field] = result[extra_field]
                date_ = date(*result['date'].timetuple()[:3])
                res['end'] = res['date'] = date_
                res.update(extra_values)
                yield res
        except ValueError as e:
            if len(e.args) > 0:
                logger.error(e.args[0])

    else:
        # Pull data out of ES
        data = list(
            (model.search().order_by('-date').filter(**filters).values_dict(
                'date', 'count', primary_field, *extra_fields))[:365])

        # Pad empty data with dummy dicts.
        days = [datum['date'].date() for datum in data]
        fields = []
        if primary_field:
            fields.append(primary_field)
        if extra_fields:
            fields += extra_fields
        data += pad_missing_stats(days, group, filters.get('date__range'),
                                  fields)

        # Sort in descending order.
        data = sorted(data,
                      key=lambda document: document['date'],
                      reverse=True)

        # Generate dictionary with options from ES document
        for val in data:
            # Convert the datetimes to a date.
            date_ = date(*val['date'].timetuple()[:3])
            if primary_field and primary_field != 'count':
                rv = dict(count=val[primary_field], date=date_, end=date_)
            else:
                rv = dict(count=val['count'], date=date_, end=date_)
            for extra_field in extra_fields:
                rv[extra_field] = val[extra_field]
            rv.update(extra_values)
            yield rv
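
Each yielded dict carries a `date` and a `count`; a small, self-contained sketch (names are illustrative) of turning one of them into the `[timestamp_ms, value]` pair a Highcharts datetime series expects:

import calendar
from datetime import date


def to_highcharts_point(row):
    """Convert one generated dict into a [milliseconds, count] pair."""
    timestamp_ms = calendar.timegm(row['date'].timetuple()) * 1000
    return [timestamp_ms, row['count']]


# e.g. to_highcharts_point({'date': date(2013, 5, 1), 'count': 42})
# -> [1367366400000, 42]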
Exemple #43
0
def global_settings(request):
    """
    Storing standard AMO-wide information used in global headers, such as
    account links and settings.
    """
    account_links = []
    tools_links = []
    context = {}

    tools_title = _('Developer')

    if request.user.is_authenticated() and hasattr(request, 'amo_user'):
        amo_user = request.amo_user
        account_links.append({
            'text': _('View Profile'),
            'href': request.user.get_profile().get_url_path(),
        })
        account_links.append({'text': _('Edit Profile'),
                              'href': reverse('users.edit')})
        account_links.append({
            'text': _('My Collections'),
            'href': reverse('collections.user', args=[amo_user.username])})

        if amo_user.favorite_addons:
            account_links.append(
                {'text': _('My Favorites'),
                 'href': reverse('collections.detail',
                                 args=[amo_user.username, 'favorites'])})

        if waffle.switch_is_active('marketplace'):
            account_links.append({'text': _('My Purchases'),
                                  'href': reverse('users.purchases')})
        account_links.append({
            'text': _('Log out'),
            'href': remora_url('/users/logout?to=' + urlquote(request.path)),
        })

        if request.amo_user.is_developer:
            tools_links.append({'text': _('Manage My Add-ons'),
                                'href': reverse('devhub.addons')})

            tools_links.append({'text': _('Submit a New Add-on'),
                                'href': reverse('devhub.submit.1')})

            if waffle.flag_is_active(request, 'accept-webapps'):
                tools_links.append({'text': _('Submit a New Web App'),
                                    'href': reverse('devhub.submit_apps.1')})

        tools_links.append({'text': _('Developer Hub'),
                            'href': reverse('devhub.index')})

        if acl.action_allowed(request, 'Editors', '%'):
            tools_title = _('Tools')
            tools_links.append({'text': _('Editor Tools'),
                                'href': reverse('editors.home')})
        if acl.action_allowed(request, 'Localizers', '%'):
            tools_title = _('Tools')
            tools_links.append({'text': _('Localizer Tools'),
                                'href': '/localizers'})
        if acl.action_allowed(request, 'Admin', '%'):
            tools_title = _('Tools')
            tools_links.append({'text': _('Admin Tools'),
                                'href': reverse('zadmin.home')})

        context['amo_user'] = request.amo_user
    else:
        context['amo_user'] = AnonymousUser()

    context.update({'account_links': account_links,
                    'settings': settings, 'amo': amo,
                    'tools_links': tools_links,
                    'tools_title': tools_title,
                    'ADMIN_MESSAGE': get_config('site_notice')})
    return context
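
`global_settings` is a Django context processor; to take effect it has to be registered in the template settings. A hedged sketch of that wiring (the dotted path below is illustrative, and very old Django versions used `TEMPLATE_CONTEXT_PROCESSORS` instead):

TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'APP_DIRS': True,
        'OPTIONS': {
            'context_processors': [
                'django.template.context_processors.request',
                # Hypothetical module path for the processor shown above.
                'amo.context_processors.global_settings',
            ],
        },
    },
]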
Exemple #44
0
    def from_upload(
        cls,
        upload,
        addon,
        channel,
        *,
        selected_apps=None,
        compatibility=None,
        parsed_data=None,
    ):
        """
        Create a Version instance and corresponding File(s) from a
        FileUpload, an Addon, a channel id and the parsed_data generated by
        parse_addon(). Additionally, for non-themes: either a list of compatible app ids
        needs to be provided as `selected_apps`, or a list of `ApplicationVersions`
        instances for each compatible app as `compatibility`.

        If `compatibility` is provided: the `version` property of the instances will be
        set to the new upload and the instances saved. If the min and/or max properties
        of the `ApplicationVersions` instance are none then `AppVersion`s parsed from
        the manifest, or defaults, are used.

        Note that it's the caller's responsability to ensure the file is valid.
        We can't check for that here because an admin may have overridden the
        validation results.
        """
        from olympia.addons.models import AddonReviewerFlags
        from olympia.addons.utils import RestrictionChecker
        from olympia.git.utils import create_git_extraction_entry

        assert parsed_data is not None

        if addon.type == amo.ADDON_STATICTHEME:
            # We don't let developers select apps for static themes
            compatibility = {
                app: (compatibility
                      or {}).get(app, ApplicationsVersions(application=app.id))
                for app in amo.APP_USAGE
            }
        assert selected_apps or compatibility

        if addon.status == amo.STATUS_DISABLED:
            raise VersionCreateError(
                'Addon is Mozilla Disabled; no new versions are allowed.')

        if upload.addon and upload.addon != addon:
            raise VersionCreateError(
                'FileUpload was made for a different Addon')

        if (not getattr(upload, 'user', None) or not upload.ip_address
                or not upload.source):
            raise VersionCreateError(
                'FileUpload does not have some required fields')

        if not upload.user.last_login_ip or not upload.user.email:
            raise VersionCreateError(
                'FileUpload user does not have some required fields')

        # This should be guaranteed by the linter, just raise an explicit
        # exception if somehow it's wrong.
        if not isinstance(parsed_data.get('install_origins', []), list):
            raise VersionCreateError(
                'install_origins was not validated properly')

        license_id = parsed_data.get('license_id')
        if not license_id and channel == amo.RELEASE_CHANNEL_LISTED:
            previous_version = addon.find_latest_version(channel=channel,
                                                         exclude=())
            if previous_version and previous_version.license_id:
                license_id = previous_version.license_id
        approval_notes = None
        if parsed_data.get('is_mozilla_signed_extension'):
            approval_notes = (
                'This version has been signed with Mozilla internal certificate.'
            )
        version = cls.objects.create(
            addon=addon,
            approval_notes=approval_notes,
            version=parsed_data['version'],
            license_id=license_id,
            channel=channel,
            release_notes=parsed_data.get('release_notes'),
        )
        with core.override_remote_addr(upload.ip_address):
            # The following log statement is used by foxsec-pipeline.
            # We override the IP because it might be called from a task and we
            # want the original IP from the submitter.
            log.info(
                f'New version: {version!r} ({version.id}) from {upload!r}',
                extra={
                    'email': upload.user.email,
                    'guid': addon.guid,
                    'upload': upload.uuid.hex,
                    'user_id': upload.user_id,
                    'from_api': upload.source == amo.UPLOAD_SOURCE_SIGNING_API,
                },
            )
            activity.log_create(amo.LOG.ADD_VERSION,
                                version,
                                addon,
                                user=upload.user)

        if not compatibility:
            compatibility = {
                amo.APP_IDS[app_id]: ApplicationsVersions(application=app_id)
                for app_id in selected_apps
            }

        compatible_apps = {}
        for parsed_app in parsed_data.get('apps', []):
            if parsed_app.appdata not in compatibility:
                # If the user chose to explicitly deselect Firefox for
                # Android, we don't create the respective
                # `ApplicationsVersions`, so the add-on is then listed for
                # Firefox only.
                continue
            avs = compatibility[parsed_app.appdata]
            avs.version = version
            avs.min = getattr(avs, 'min', parsed_app.min)
            avs.max = getattr(avs, 'max', parsed_app.max)
            avs.save()
            compatible_apps[parsed_app.appdata] = avs

        # Pre-generate compatible_apps property to avoid accidentally
        # triggering queries with that instance later.
        version.compatible_apps = compatible_apps

        # Record declared install origins. base_domain is set automatically.
        if waffle.switch_is_active('record-install-origins'):
            for origin in set(parsed_data.get('install_origins', [])):
                version.installorigin_set.create(origin=origin)

        # Create relevant file.
        File.from_upload(
            upload=upload,
            version=version,
            parsed_data=parsed_data,
        )

        version.inherit_nomination(from_statuses=[amo.STATUS_AWAITING_REVIEW])
        version.disable_old_files()

        # After the upload has been copied to its permanent location, delete it
        # from storage. Keep the FileUpload instance (it gets cleaned up by a
        # cron eventually some time after its creation, in amo.cron.gc()),
        # making sure it's associated with the add-on instance.
        storage.delete(upload.path)
        upload.path = ''
        if upload.addon is None:
            upload.addon = addon
        upload.save()

        version_uploaded.send(instance=version, sender=Version)

        if (waffle.switch_is_active('enable-yara')
                or waffle.switch_is_active('enable-customs')
                or waffle.switch_is_active('enable-wat')):
            ScannerResult.objects.filter(upload_id=upload.id).update(
                version=version)

        if waffle.switch_is_active('enable-uploads-commit-to-git-storage'):
            # Schedule this version for git extraction.
            transaction.on_commit(
                lambda: create_git_extraction_entry(version=version))

        # Generate a preview and icon for listed static themes
        if (addon.type == amo.ADDON_STATICTHEME
                and channel == amo.RELEASE_CHANNEL_LISTED):
            theme_data = parsed_data.get('theme', {})
            generate_static_theme_preview(theme_data, version.pk)

        # Reset add-on reviewer flags to disable auto-approval and require
        # admin code review if the package has already been signed by mozilla.
        reviewer_flags_defaults = {}
        is_mozilla_signed = parsed_data.get('is_mozilla_signed_extension')
        if upload.validation_timeout:
            reviewer_flags_defaults['needs_admin_code_review'] = True
        if is_mozilla_signed and addon.type != amo.ADDON_LPAPP:
            reviewer_flags_defaults['needs_admin_code_review'] = True
            reviewer_flags_defaults['auto_approval_disabled'] = True

        # Check if the approval should be restricted
        if not RestrictionChecker(upload=upload).is_auto_approval_allowed():
            flag = ('auto_approval_disabled'
                    if channel == amo.RELEASE_CHANNEL_LISTED else
                    'auto_approval_disabled_unlisted')
            reviewer_flags_defaults[flag] = True

        if reviewer_flags_defaults:
            AddonReviewerFlags.objects.update_or_create(
                addon=addon, defaults=reviewer_flags_defaults)

        # Authors need to be notified about auto-approval delay again since
        # they are submitting a new version.
        addon.reset_notified_about_auto_approval_delay()

        # Track the time it took from first upload through validation
        # (and whatever else) until a version was created.
        upload_start = utc_millesecs_from_epoch(upload.created)
        now = datetime.datetime.now()
        now_ts = utc_millesecs_from_epoch(now)
        upload_time = now_ts - upload_start

        log.info('Time for version {version} creation from upload: {delta}; '
                 'created={created}; now={now}'.format(delta=upload_time,
                                                       version=version,
                                                       created=upload.created,
                                                       now=now))
        statsd.timing('devhub.version_created_from_upload', upload_time)

        return version
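
The timing code above assumes a `utc_millesecs_from_epoch` helper; a minimal sketch of what such a helper can look like (an approximation, the real olympia utility may differ in detail):

import calendar
import datetime


def utc_millesecs_from_epoch(for_datetime=None):
    """Milliseconds since the Unix epoch for the given datetime."""
    dt = for_datetime or datetime.datetime.now()
    seconds = calendar.timegm(dt.utctimetuple())
    return seconds * 1000 + dt.microsecond // 1000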
Exemple #45
0
    def get_or_create_user(self, *args, **kwargs):
        user = super(MozilliansAuthBackend, self).get_or_create_user(*args, **kwargs)
        if switch_is_active('dino-park-autologin') and user:
            self.request.session['oidc_login_next'] = '/beta'

        return user
Exemple #46
0
    def has_permission(self, request, view):
        if self.type == 'flag':
            return flag_is_active(request, self.name)
        elif self.type == 'switch':
            return switch_is_active(self.name)
        raise NotImplementedError
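
For context, a hedged sketch of how a waffle-gated permission like this could be declared with Django REST Framework (class and attribute names here are illustrative, not this project's actual API):

from rest_framework.permissions import BasePermission
from waffle import flag_is_active, switch_is_active


class GatedByWaffle(BasePermission):
    # Subclasses set these, e.g. type = 'switch', name = 'some-switch'.
    type = None
    name = None

    def has_permission(self, request, view):
        if self.type == 'flag':
            return flag_is_active(request, self.name)
        elif self.type == 'switch':
            return switch_is_active(self.name)
        raise NotImplementedError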
Exemple #47
0
def annotate_legacy_addon_restrictions(results, is_new_upload):
    """
    Annotate validation results to restrict uploads of legacy
    (non-webextension) add-ons if specific conditions are met.
    """
    metadata = results.get('metadata', {})
    is_webextension = metadata.get('is_webextension') is True

    if is_webextension:
        # If we're dealing with a webextension, return early as the whole
        # function is supposed to only care about legacy extensions.
        return results

    target_apps = metadata.get('applications', {})
    max_target_firefox_version = max(
        version_int(target_apps.get('firefox', {}).get('max', '')),
        version_int(target_apps.get('android', {}).get('max', '')))

    is_extension_or_complete_theme = (
        # Note: annoyingly, `detected_type` is at the root level, not under
        # `metadata`.
        results.get('detected_type') in ('theme', 'extension'))
    is_targeting_firefoxes_only = target_apps and (set(
        target_apps.keys()).intersection(
            ('firefox', 'android')) == set(target_apps.keys()))
    is_targeting_thunderbird_or_seamonkey_only = target_apps and (set(
        target_apps.keys()).intersection(
            ('thunderbird', 'seamonkey')) == set(target_apps.keys()))
    is_targeting_firefox_lower_than_53_only = (
        metadata.get('strict_compatibility') is True and
        # version_int('') is actually 200100. If strict compatibility is true,
        # the validator should have complained about the non-existent max
        # version, but it doesn't hurt to check that the value is sane anyway.
        max_target_firefox_version > 200100
        and max_target_firefox_version < 53000000000000)
    is_targeting_firefox_higher_or_equal_than_57 = (
        max_target_firefox_version >= 57000000000000
        and max_target_firefox_version < 99000000000000)

    # Thunderbird/Seamonkey only add-ons are moving to addons.thunderbird.net.
    if (is_targeting_thunderbird_or_seamonkey_only
            and waffle.switch_is_active('disallow-thunderbird-and-seamonkey')):
        msg = ugettext(u'Add-ons for Thunderbird and SeaMonkey are now '
                       u'listed and maintained on addons.thunderbird.net. '
                       u'You can use the same account to update your '
                       u'add-ons on the new site.')

        insert_validation_message(results,
                                  message=msg,
                                  msg_id='thunderbird_and_seamonkey_migration')

    # New legacy add-ons targeting Firefox only must target Firefox 53 or
    # lower, strictly. Extensions targeting multiple other apps are exempt from
    # this.
    elif (is_new_upload and is_extension_or_complete_theme
          and is_targeting_firefoxes_only
          and not is_targeting_firefox_lower_than_53_only):

        msg = ugettext(
            u'Starting with Firefox 53, new add-ons on this site can '
            u'only be WebExtensions.')

        insert_validation_message(results,
                                  message=msg,
                                  msg_id='legacy_addons_restricted')

    # All legacy add-ons (new or upgrades) targeting Firefox must target
    # Firefox 56.* or lower, even if they target multiple apps.
    elif (is_extension_or_complete_theme
          and is_targeting_firefox_higher_or_equal_than_57):
        # Note: legacy add-ons targeting '*' (which is the default for sdk
        # add-ons) are excluded from this error, and instead are silently
        # rewritten as supporting '56.*' in the manifest parsing code.
        msg = ugettext(
            u'Legacy add-ons are not compatible with Firefox 57 or higher. '
            u'Use a maxVersion of 56.* or lower.')

        insert_validation_message(results,
                                  message=msg,
                                  msg_id='legacy_addons_max_version')

    return results
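
The `insert_validation_message` helper is not shown here; a minimal sketch of what it plausibly does (an assumption, the real devhub helper may track more counters and metadata):

def insert_validation_message(results, message='', msg_id='', msg_type='error'):
    """Prepend a message to the validation results and bump the counter."""
    results.setdefault('messages', []).insert(0, {
        'id': [msg_id],
        'message': message,
        'type': msg_type,
    })
    results[msg_type + 's'] = results.get(msg_type + 's', 0) + 1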
Exemple #48
0
def check_xpi_info(xpi_info, addon=None, xpi_file=None, user=None):
    from olympia.addons.models import Addon, DeniedGuid
    guid = xpi_info['guid']
    is_webextension = xpi_info.get('is_webextension', False)

    # If we allow the guid to be omitted, we assume that one was generated
    # or existed before and use that one.
    # An example is WebExtensions, which don't require a guid; we generate
    # one once they're uploaded. Then, if you update that WebExtension, we
    # just reuse the original guid.
    if addon and not guid and is_webextension:
        xpi_info['guid'] = guid = addon.guid
    if not guid and not is_webextension:
        raise forms.ValidationError(ugettext('Could not find an add-on ID.'))

    if guid:
        current_user = core.get_user()
        if current_user:
            deleted_guid_clashes = Addon.unfiltered.exclude(
                authors__id=current_user.id).filter(guid=guid)
        else:
            deleted_guid_clashes = Addon.unfiltered.filter(guid=guid)

        if addon and addon.guid != guid:
            msg = ugettext(
                'The add-on ID in your manifest.json or install.rdf (%s) '
                'does not match the ID of your add-on on AMO (%s)')
            raise forms.ValidationError(msg % (guid, addon.guid))
        if (not addon and
            # Non-deleted add-ons.
            (Addon.objects.filter(guid=guid).exists() or
             # DeniedGuid objects for deletions for Mozilla disabled add-ons
             DeniedGuid.objects.filter(guid=guid).exists() or
             # Deleted add-ons that don't belong to the uploader.
             deleted_guid_clashes.exists())):
            raise forms.ValidationError(ugettext('Duplicate add-on ID found.'))
    if len(xpi_info['version']) > 32:
        raise forms.ValidationError(
            ugettext('Version numbers should have fewer than 32 characters.'))
    if not VERSION_RE.match(xpi_info['version']):
        raise forms.ValidationError(
            ugettext('Version numbers should only contain letters, numbers, '
                     'and these punctuation characters: +*.-_.'))

    if is_webextension and xpi_info.get('type') == amo.ADDON_STATICTHEME:
        if not waffle.switch_is_active('allow-static-theme-uploads'):
            raise forms.ValidationError(ugettext(
                'WebExtension theme uploads are currently not supported.'))

    if xpi_file:
        # Make sure we pass in a copy of `xpi_info` since
        # `resolve_webext_translations` modifies data in-place
        translations = Addon.resolve_webext_translations(
            xpi_info.copy(), xpi_file)
        verify_mozilla_trademark(translations['name'], core.get_user())

    # Check whether the user is allowed to submit this type of add-on.
    if not acl.submission_allowed(user, xpi_info):
        raise forms.ValidationError(
            ugettext(u'You cannot submit this type of add-on'))

    if not addon and not system_addon_submission_allowed(
            user, xpi_info):
        guids = ' or '.join(
                '"' + guid + '"' for guid in amo.SYSTEM_ADDON_GUIDS)
        raise forms.ValidationError(
            ugettext(u'You cannot submit an add-on with a guid ending '
                     u'%s' % guids))

    if not mozilla_signed_extension_submission_allowed(user, xpi_info):
        raise forms.ValidationError(
            ugettext(u'You cannot submit a Mozilla Signed Extension'))

    return xpi_info
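
The `VERSION_RE` used above is defined elsewhere; a plausible pattern matching the error message (letters, numbers and `+*.-_`, capped length) would be something like the following, though the project's exact regex may differ:

import re

# \w already covers letters, digits and underscore.
VERSION_RE = re.compile(r'^[-+*.\w]{1,32}$')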
Exemple #49
0
def test_enabled():
    assert waffle.switch_is_active('foo')
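
One common way to make a switch-gated test like this pass is django-waffle's test utilities, assuming the test runs inside a Django test environment:

import waffle
from waffle.testutils import override_switch


@override_switch('foo', active=True)
def test_enabled():
    assert waffle.switch_is_active('foo')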
Exemple #50
0
def choose(request):
    if waffle.switch_is_active('tracking-protection-redirect'):
        return l10n_utils.render(request, 'firefox/choose.html')
    else:
        query = force_text(request.META.get('QUERY_STRING'), errors='ignore')
        return HttpResponseRedirect('?'.join([reverse('firefox.new'), query]))
Exemple #51
0
    def from_upload(cls,
                    upload,
                    addon,
                    selected_apps,
                    channel,
                    parsed_data=None):
        """
        Create a Version instance and corresponding File(s) from a
        FileUpload, an Addon, a list of compatible app ids, a channel id and
        the parsed_data generated by parse_addon().

        Note that it's the caller's responsibility to ensure the file is valid.
        We can't check for that here because an admin may have overridden the
        validation results.
        """
        from olympia.addons.models import AddonReviewerFlags
        from olympia.git.utils import create_git_extraction_entry

        assert parsed_data is not None

        if addon.status == amo.STATUS_DISABLED:
            raise VersionCreateError(
                'Addon is Mozilla Disabled; no new versions are allowed.')

        if upload.addon and upload.addon != addon:
            raise VersionCreateError(
                'FileUpload was made for a different Addon')

        if not upload.user or not upload.ip_address or not upload.source:
            raise VersionCreateError(
                'FileUpload does not have some required fields')

        license_id = None
        if channel == amo.RELEASE_CHANNEL_LISTED:
            previous_version = addon.find_latest_version(channel=channel,
                                                         exclude=())
            if previous_version and previous_version.license_id:
                license_id = previous_version.license_id
        approval_notes = None
        if parsed_data.get('is_mozilla_signed_extension'):
            approval_notes = (
                'This version has been signed with Mozilla internal certificate.'
            )
        version = cls.objects.create(
            addon=addon,
            approval_notes=approval_notes,
            version=parsed_data['version'],
            license_id=license_id,
            channel=channel,
        )
        email = upload.user.email if upload.user and upload.user.email else ''
        with core.override_remote_addr(upload.ip_address):
            # The following log statement is used by foxsec-pipeline.
            # We override the IP because it might be called from a task and we
            # want the original IP from the submitter.
            log.info(
                'New version: %r (%s) from %r' % (version, version.id, upload),
                extra={
                    'email': email,
                    'guid': addon.guid,
                    'upload': upload.uuid.hex,
                    'user_id': upload.user_id,
                    'from_api': upload.source == amo.UPLOAD_SOURCE_API,
                },
            )
            activity.log_create(amo.LOG.ADD_VERSION,
                                version,
                                addon,
                                user=upload.user or get_task_user())

        if addon.type == amo.ADDON_STATICTHEME:
            # We don't let developers select apps for static themes
            selected_apps = [app.id for app in amo.APP_USAGE]

        compatible_apps = {}
        for app in parsed_data.get('apps', []):
            if app.id not in selected_apps:
                # If the user chose to explicitly deselect Firefox for
                # Android, we don't create the respective
                # `ApplicationsVersions`, so the add-on is then listed for
                # Firefox only.
                continue

            compatible_apps[app.appdata] = ApplicationsVersions(
                version=version, min=app.min, max=app.max, application=app.id)
            compatible_apps[app.appdata].save()

        # Pre-generate _compatible_apps property to avoid accidentally
        # triggering queries with that instance later.
        version._compatible_apps = compatible_apps

        # Create relevant file and update the all_files cached property on the
        # Version, because we might need it afterwards.
        version.all_files = [
            File.from_upload(
                upload=upload,
                version=version,
                parsed_data=parsed_data,
            )
        ]

        version.inherit_nomination(from_statuses=[amo.STATUS_AWAITING_REVIEW])
        version.disable_old_files()

        # After the upload has been copied to its permanent location, delete it
        # from storage. Keep the FileUpload instance (it gets cleaned up by a
        # cron eventually some time after its creation, in amo.cron.gc()),
        # making sure it's associated with the add-on instance.
        storage.delete(upload.path)
        upload.path = ''
        if upload.addon is None:
            upload.addon = addon
        upload.save()

        version_uploaded.send(instance=version, sender=Version)

        if version.is_webextension:
            if (waffle.switch_is_active('enable-yara')
                    or waffle.switch_is_active('enable-customs')
                    or waffle.switch_is_active('enable-wat')):
                ScannerResult.objects.filter(upload_id=upload.id).update(
                    version=version)

        if waffle.switch_is_active('enable-uploads-commit-to-git-storage'):
            # Schedule this version for git extraction.
            transaction.on_commit(
                lambda: create_git_extraction_entry(version=version))

        # Generate a preview and icon for listed static themes
        if (addon.type == amo.ADDON_STATICTHEME
                and channel == amo.RELEASE_CHANNEL_LISTED):
            theme_data = parsed_data.get('theme', {})
            generate_static_theme_preview(theme_data, version.pk)

        # Reset add-on reviewer flags to disable auto-approval and require
        # admin code review if the package has already been signed by mozilla.
        reviewer_flags_defaults = {}
        is_mozilla_signed = parsed_data.get('is_mozilla_signed_extension')
        if upload.validation_timeout:
            reviewer_flags_defaults['needs_admin_code_review'] = True
        if is_mozilla_signed and addon.type != amo.ADDON_LPAPP:
            reviewer_flags_defaults['needs_admin_code_review'] = True
            reviewer_flags_defaults['auto_approval_disabled'] = True

        if reviewer_flags_defaults:
            AddonReviewerFlags.objects.update_or_create(
                addon=addon, defaults=reviewer_flags_defaults)

        # Authors need to be notified about auto-approval delay again since
        # they are submitting a new version.
        addon.reset_notified_about_auto_approval_delay()

        # Track the time it took from first upload through validation
        # (and whatever else) until a version was created.
        upload_start = utc_millesecs_from_epoch(upload.created)
        now = datetime.datetime.now()
        now_ts = utc_millesecs_from_epoch(now)
        upload_time = now_ts - upload_start

        log.info('Time for version {version} creation from upload: {delta}; '
                 'created={created}; now={now}'.format(delta=upload_time,
                                                       version=version,
                                                       created=upload.created,
                                                       now=now))
        statsd.timing('devhub.version_created_from_upload', upload_time)

        return version
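
The git-extraction scheduling above relies on Django's `transaction.on_commit()`, so the callback only fires if the surrounding transaction commits; the same pattern in a standalone, hedged sketch (function names are illustrative):

from django.db import transaction


def enqueue_git_extraction(version_pk):
    # Stand-in for create_git_extraction_entry(); illustrative only.
    print('queueing git extraction for version %s' % version_pk)


def schedule_after_commit(version_pk):
    # Deferred until the current database transaction commits successfully.
    transaction.on_commit(lambda: enqueue_git_extraction(version_pk))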
Exemple #52
0
    def get_transaction_parameters(self,
                                   basket,
                                   request=None,
                                   use_client_side_checkout=False,
                                   **kwargs):
        """
        Create a new Alipay payment.

        Arguments:
            basket (Basket): The basket of products being purchased.
            request (Request, optional): A Request object which is used to construct PayPal's `return_url`.
            use_client_side_checkout (bool, optional): This value is not used.
            **kwargs: Additional parameters; not used by this method.

        Returns:
            dict: PayPal-specific parameters required to complete a transaction. Must contain a URL
                to which users can be directed in order to approve a newly created payment.

        Raises:
            GatewayError: Indicates a general error or unexpected behavior on the part of PayPal which prevented
                a payment from being created.
        """
        return_url = urljoin(get_ecommerce_url(), reverse('alipay:execute'))
        data = {
            'intent':
            'sale',
            'redirect_urls': {
                'return_url': return_url,
                'cancel_url': self.cancel_url,
            },
            'payer': {
                'payment_method': 'alipay',
            },
            'transactions': [{
                'amount': {
                    'total': unicode(basket.total_incl_tax),
                    'currency': basket.currency,
                },
                'item_list': {
                    'items': [
                        {
                            'quantity':
                            line.quantity,
                            # PayPal requires that item names be at most 127 characters long.
                            'name':
                            middle_truncate(line.product.title, 127),
                            # PayPal requires that the sum of all the item prices (where price = price * quantity)
                            # equals to the total amount set in amount['total'].
                            'price':
                            unicode(line.line_price_incl_tax_incl_discounts /
                                    line.quantity),
                            'currency':
                            line.stockrecord.price_currency,
                        } for line in basket.all_lines()
                    ],
                },
                'invoice_number': basket.order_number,
            }],
        }

        if waffle.switch_is_active('create_and_set_webprofile'):
            locale_code = self.resolve_alipay_locale(
                request.COOKIES.get(settings.LANGUAGE_COOKIE_NAME))
            web_profile_id = self.create_temporary_web_profile(locale_code)
            if web_profile_id is not None:
                data['experience_profile_id'] = web_profile_id
        else:
            try:
                web_profile = AlipayWebProfile.objects.get(
                    name=self.DEFAULT_PROFILE_NAME)
                data['experience_profile_id'] = web_profile.id
            except AlipayWebProfile.DoesNotExist:
                pass

        available_attempts = 1
        if waffle.switch_is_active('PAYPAL_RETRY_ATTEMPTS'):
            available_attempts = self.retry_attempts

        for i in range(1, available_attempts + 1):
            try:
                payment = alipay_sdk.Payment(data, api=self.alipay_api)
                payment.create()
                if payment.success():
                    break
                else:
                    if i < available_attempts:
                        logger.warning(
                            u"Creating AliPay payment for basket [%d] was unsuccessful. Will retry.",
                            basket.id,
                            exc_info=True)
                    else:
                        error = self._get_error(payment)
                        # pylint: disable=unsubscriptable-object
                        entry = self.record_processor_response(
                            error,
                            transaction_id=error['debug_id'],
                            basket=basket)
                        logger.error(
                            u"%s [%d], %s [%d].",
                            "Failed to create AliPay payment for basket",
                            basket.id,
                            "AliPay's response recorded in entry",
                            entry.id,
                            exc_info=True)
                        raise GatewayError(error)

            except:  # pylint: disable=bare-except
                if i < available_attempts:
                    logger.warning(
                        u"Creating AliPay payment for basket [%d] resulted in an exception. Will retry.",
                        basket.id,
                        exc_info=True)
                else:
                    logger.exception(
                        u"After %d retries, creating AliPay payment for basket [%d] still experienced exception.",
                        i, basket.id)
                    raise

        entry = self.record_processor_response(payment.to_dict(),
                                               transaction_id=payment.id,
                                               basket=basket)
        logger.info(
            "Successfully created AliPay payment [%s] for basket [%d].",
            payment.id, basket.id)
        id = payment.id
        order_string = self.alipay_api.api_alipay_trade_page_pay(
            out_trade_no=payment.id,
            total_amount=unicode(basket.total_incl_tax),  #0.01,
            subject=middle_truncate(line.product.title, 127),
            return_url=return_url,
        )

        parameters = {
            'payment_page_url':
            self.alipay_api.default_endpoint() + '?' + order_string,
            #'payment_page_url': 'error_test',
        }

        return parameters
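
The `middle_truncate` helper used for item names is not shown; a plausible sketch (not necessarily the project's exact implementation) that keeps the start and end of a long title:

def middle_truncate(text, length):
    """Truncate `text` to at most `length` characters, eliding the middle.

    Assumes `length` is comfortably larger than the ellipsis.
    """
    if len(text) <= length:
        return text
    ellipsis = '...'
    head = (length - len(ellipsis)) // 2
    tail = length - len(ellipsis) - head
    return text[:head] + ellipsis + text[-tail:]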
Exemple #53
0
    def display_verified_enrollment(self):
        return switch_is_active('display_verified_enrollment')
Exemple #54
0
    def can_migrate_to_fxa(self):
        return (waffle.switch_is_active('fx-accounts-migration')
                and self.source != amo.LOGIN_SOURCE_FXA)
Exemple #55
0
def submit_draft_for_review(auth, node, draft, *args, **kwargs):
    """Submit for approvals and/or notifications

    :return: serialized registration
    :rtype: dict
    :raises: HTTPError if embargo end date is invalid
    """
    if waffle.switch_is_active(features.OSF_PREREGISTRATION):
        raise HTTPError(
            http.GONE,
            data={
                'message_short':
                'The Prereg Challenge has ended',
                'message_long':
                'The Prereg Challenge has ended. No new submissions are accepted at this time.'
            })

    json_data = request.get_json()
    if 'data' not in json_data:
        raise HTTPError(http.BAD_REQUEST,
                        data=dict(message_long='Payload must include "data".'))
    data = json_data['data']
    if 'attributes' not in data:
        raise HTTPError(
            http.BAD_REQUEST,
            data=dict(message_long='Payload must include "data/attributes".'))
    attributes = data['attributes']
    meta = {}
    registration_choice = attributes['registration_choice']
    validate_registration_choice(registration_choice)
    if registration_choice == 'embargo':
        # Initiate embargo
        end_date_string = attributes['lift_embargo']
        validate_embargo_end_date(end_date_string, node)
        meta['embargo_end_date'] = end_date_string
    meta['registration_choice'] = registration_choice

    if draft.registered_node and not draft.registered_node.is_deleted:
        raise HTTPError(
            http.BAD_REQUEST,
            data=dict(
                message_long=
                'This draft has already been registered, if you wish to '
                'register it again or submit it for review please create '
                'a new draft.'))

    # Don't allow resubmission unless submission was rejected
    if draft.approval and draft.approval.state != Sanction.REJECTED:
        raise HTTPError(
            http.CONFLICT,
            data=dict(
                message_long='Cannot resubmit previously submitted draft.'))

    draft.submit_for_review(initiated_by=auth.user, meta=meta, save=True)

    if prereg_utils.get_prereg_schema() == draft.registration_schema:

        node.add_log(action=NodeLog.PREREG_REGISTRATION_INITIATED,
                     params={'node': node._primary_key},
                     auth=auth,
                     save=False)
        node.save()

    push_status_message(language.AFTER_SUBMIT_FOR_REVIEW,
                        kind='info',
                        trust=False,
                        id='registration_submitted')
    return {
        'data': {
            'links': {
                'html': node.web_url_for('node_registrations', _guid=True)
            }
        },
        'status': 'initiated',
    }, http.ACCEPTED
Exemple #56
0
def icloud_bookmarks_redirect(request):
    if (waffle.switch_is_active('icloud_bookmarks_redirect')):
        return redirect('/blocked/i1214/', permanent=False)
    else:
        return addon_detail(request, 'icloud-bookmarks')
Exemple #57
0
def about(request, project, slug, template_name='ignite/about.html'):
    if waffle.switch_is_active('announce_winners'):
        template_name = 'ignite/about-winners.html'
    return jingo.render(request, template_name)
Exemple #58
0
    def get_default_queryset(self):
        if waffle.switch_is_active(ENABLE_INACTIVE_SCHEMAS):
            return RegistrationSchema.objects.get_latest_versions(
                only_active=False)
        else:
            return RegistrationSchema.objects.get_latest_versions()
Exemple #59
0
    def get(self, request, course_id, error=None):
        """Displays the course mode choice page.

        Args:
            request (`Request`): The Django Request object.
            course_id (unicode): The slash-separated course key.

        Keyword Args:
            error (unicode): If provided, display this error message
                on the page.

        Returns:
            Response

        """
        course_key = CourseKey.from_string(course_id)

        # Check whether the user has access to this course
        # based on country access rules.
        embargo_redirect = embargo_api.redirect_if_blocked(
            course_key,
            user=request.user,
            ip_address=get_ip(request),
            url=request.path)
        if embargo_redirect:
            return redirect(embargo_redirect)

        enrollment_mode, is_active = CourseEnrollment.enrollment_mode_for_user(
            request.user, course_key)
        modes = CourseMode.modes_for_course_dict(course_key)
        ecommerce_service = EcommerceService()

        # We assume that, if 'professional' is one of the modes, it should be the *only* mode.
        # If there are both modes, default to non-id-professional.
        has_enrolled_professional = (
            CourseMode.is_professional_slug(enrollment_mode) and is_active)
        if CourseMode.has_professional_mode(
                modes) and not has_enrolled_professional:
            purchase_workflow = request.GET.get("purchase_workflow", "single")
            verify_url = reverse(
                'verify_student_start_flow',
                kwargs={'course_id': six.text_type(course_key)})
            redirect_url = "{url}?purchase_workflow={workflow}".format(
                url=verify_url, workflow=purchase_workflow)
            if ecommerce_service.is_enabled(request.user):
                professional_mode = modes.get(
                    CourseMode.NO_ID_PROFESSIONAL_MODE) or modes.get(
                        CourseMode.PROFESSIONAL)
                if purchase_workflow == "single" and professional_mode.sku:
                    redirect_url = ecommerce_service.get_checkout_page_url(
                        professional_mode.sku)
                if purchase_workflow == "bulk" and professional_mode.bulk_sku:
                    redirect_url = ecommerce_service.get_checkout_page_url(
                        professional_mode.bulk_sku)
            return redirect(redirect_url)

        course = modulestore().get_course(course_key)

        # If there isn't a verified mode available, then there's nothing
        # to do on this page.  Send the user to the dashboard.
        if not CourseMode.has_verified_mode(modes):
            return redirect(reverse('dashboard'))

        # If a user has already paid, redirect them to the dashboard.
        if is_active and (enrollment_mode in CourseMode.VERIFIED_MODES +
                          [CourseMode.NO_ID_PROFESSIONAL_MODE]):
            # If the course has started redirect to course home instead
            if course.has_started():
                return redirect(
                    reverse('openedx.course_experience.course_home',
                            kwargs={'course_id': course_key}))
            return redirect(reverse('dashboard'))

        donation_for_course = request.session.get("donation_for_course", {})
        chosen_price = donation_for_course.get(six.text_type(course_key), None)

        if CourseEnrollment.is_enrollment_closed(request.user, course):
            locale = to_locale(get_language())
            enrollment_end_date = format_datetime(course.enrollment_end,
                                                  'short',
                                                  locale=locale)
            params = six.moves.urllib.parse.urlencode(
                {'course_closed': enrollment_end_date})
            return redirect('{0}?{1}'.format(reverse('dashboard'), params))

        # When a credit mode is available, students will be given the option
        # to upgrade from a verified mode to a credit mode at the end of the course.
        # This allows students who have completed photo verification to be eligible
        # for university credit.
        # Since credit isn't one of the selectable options on the track selection page,
        # we need to check *all* available course modes in order to determine whether
        # a credit mode is available.  If so, then we show slightly different messaging
        # for the verified track.
        has_credit_upsell = any(
            CourseMode.is_credit_mode(mode)
            for mode in CourseMode.modes_for_course(course_key,
                                                    only_selectable=False))
        course_id = text_type(course_key)

        context = {
            "course_modes_choose_url":
            reverse("course_modes_choose", kwargs={'course_id': course_id}),
            "modes":
            modes,
            "has_credit_upsell":
            has_credit_upsell,
            "course_name":
            course.display_name_with_default,
            "course_org":
            course.display_org_with_default,
            "course_num":
            course.display_number_with_default,
            "chosen_price":
            chosen_price,
            "error":
            error,
            "responsive":
            True,
            "nav_hidden":
            True,
            "content_gating_enabled":
            ContentTypeGatingConfig.enabled_for_enrollment(
                user=request.user, course_key=course_key),
            "course_duration_limit_enabled":
            CourseDurationLimitConfig.enabled_for_enrollment(
                user=request.user, course_key=course_key),
        }
        context.update(
            get_experiment_user_metadata_context(
                course,
                request.user,
            ))

        title_content = _(
            "Congratulations!  You are now enrolled in {course_name}").format(
                course_name=course.display_name_with_default)

        context["title_content"] = title_content

        if "verified" in modes:
            verified_mode = modes["verified"]
            context["suggested_prices"] = [
                decimal.Decimal(x.strip())
                for x in verified_mode.suggested_prices.split(",")
                if x.strip()
            ]
            context["currency"] = verified_mode.currency.upper()
            context["min_price"] = verified_mode.min_price
            context["verified_name"] = verified_mode.name
            context["verified_description"] = verified_mode.description

            if verified_mode.sku:
                context[
                    "use_ecommerce_payment_flow"] = ecommerce_service.is_enabled(
                        request.user)
                context[
                    "ecommerce_payment_page"] = ecommerce_service.payment_page_url(
                    )
                context["sku"] = verified_mode.sku
                context["bulk_sku"] = verified_mode.bulk_sku

        context['currency_data'] = []
        if waffle.switch_is_active('local_currency'):
            if 'edx-price-l10n' not in request.COOKIES:
                currency_data = get_currency_data()
                try:
                    context['currency_data'] = json.dumps(currency_data)
                except TypeError:
                    pass
        return render_to_response("course_modes/choose.html", context)
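
The `suggested_prices` handling above turns a comma-separated string into `Decimal`s; the same transformation as a small standalone sketch:

import decimal


def parse_suggested_prices(raw):
    """'10, 25, 50' -> [Decimal('10'), Decimal('25'), Decimal('50')]."""
    return [decimal.Decimal(x.strip()) for x in raw.split(',') if x.strip()]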
Exemple #60
0
    def handle_processor_response(self, response, basket=None):
        """
        Execute an approved PayPal payment.

        This method creates PaymentEvents and Sources for approved payments.

        Arguments:
            response (dict): Dictionary of parameters returned by PayPal in the `return_url` query string.

        Keyword Arguments:
            basket (Basket): Basket being purchased via the payment processor.

        Raises:
            GatewayError: Indicates a general error or unexpected behavior on the part of PayPal which prevented
                an approved payment from being executed.

        Returns:
            HandledProcessorResponse
        """

        # By default PayPal payment will be executed only once.
        available_attempts = 1

        # Add retry attempts (provided in the configuration)
        # if the waffle switch 'PAYPAL_RETRY_ATTEMPTS' is active
        if waffle.switch_is_active('PAYPAL_RETRY_ATTEMPTS'):
            available_attempts = available_attempts + self.retry_attempts

        for attempt_count in range(1, available_attempts + 1):
            payment = alipay_sdk.Payment.find(response.get('paymentId'),
                                              api=self.alipay_api)

            if payment.success():
                # On success break the loop.
                break

            # Raise an exception for payments that were not successfully executed. Consuming code is
            # responsible for handling the exception
            error = self._get_error(payment)
            # pylint: disable=unsubscriptable-object
            entry = self.record_processor_response(
                error, transaction_id=error['debug_id'], basket=basket)

            logger.warning(
                "Failed to execute PayPal payment on attempt [%d]. "
                "PayPal's response was recorded in entry [%d].", attempt_count,
                entry.id)

            # After utilizing all retry attempts, raise the exception 'GatewayError'
            if attempt_count == available_attempts:
                logger.error(
                    "Failed to execute PayPal payment [%s]. "
                    "PayPal's response was recorded in entry [%d].",
                    payment.id, entry.id)
                raise GatewayError

        self.record_processor_response(payment.to_dict(),
                                       transaction_id=payment.id,
                                       basket=basket)
        logger.info(
            "Successfully executed PayPal payment [%s] for basket [%d].",
            payment.id, basket.id)

        currency = payment.transactions[0].amount.currency
        total = Decimal(payment.transactions[0].amount.total)
        transaction_id = payment.id
        # payer_info.email may be None, see:
        # http://stackoverflow.com/questions/24090460/alipay-rest-api-return-empty-payer-info-for-non-us-accounts
        email = payment.payer.payer_info.email
        label = 'AliPay ({})'.format(email) if email else 'AliPay Account'

        return HandledProcessorResponse(transaction_id=transaction_id,
                                        total=total,
                                        currency=currency,
                                        card_number=label,
                                        card_type=None)
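
The execution loop above follows a simple "N attempts, record and raise on the last one" shape; a generic standalone sketch of the same idea (names are illustrative):

def run_with_retries(operation, attempts):
    """Call `operation()` until it returns a truthy result or attempts run out."""
    for attempt in range(1, attempts + 1):
        result = operation()
        if result:
            return result
        if attempt == attempts:
            raise RuntimeError('operation failed after %d attempts' % attempts)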