Ejemplo n.º 1
0
def _task(**kw):
    """One-off data migration over AddonDeviceType (ADT) rows.

    Three passes: de-dupe (addon, device_type) pairs, delete rows whose
    addon no longer exists, then mirror every DEVICE_MOBILE row as a
    DEVICE_GAIA row where one is missing.
    """
    # Remove any dupes. `UNIQUE` constraint introduced in migration 504.
    dupes = (ADT.objects.values_list('addon', 'device_type')
                        .annotate(c=Count('id')).filter(c__gt=1))
    for addon, device_type, total in dupes:
        devices = ADT.objects.filter(addon_id=addon, device_type=device_type)
        # Keep exactly one row: delete all but the last of `total` dupes.
        for d in devices[:total - 1]:
            d.delete()

    # Remove stale device types.
    devices = ADT.objects.all()
    for chunk in chunked(devices, 50):
        for device in chunk:
            try:
                # Accessing the FK raises if the addon row is gone.
                device.addon
            except ObjectDoesNotExist:
                device.delete()

    # `DEVICE_MOBILE` -> `DEVICE_MOBILE` and `DEVICE_GAIA`.
    devices = ADT.objects.filter(device_type=mkt.DEVICE_MOBILE.id)

    for chunk in chunked(devices, 50):
        for device in chunk:
            if mkt.DEVICE_GAIA in device.addon.device_types:
                continue
            # Clearing the pk makes save() INSERT a new GAIA row instead of
            # updating the mobile one.
            device.id = None
            device.device_type = mkt.DEVICE_GAIA.id
            device.save()
            # Re-save the addon as well — presumably to refresh cached
            # device info / trigger reindexing; verify against Addon.save().
            device.addon.save()
Ejemplo n.º 2
0
    def handle(self, *args, **options):
        """Queue color migration for feed apps/collections missing a color."""
        pending = [
            (FeedApp, 'app'),
            (FeedCollection, 'collection'),
        ]
        for model, kind in pending:
            uncolored_ids = model.objects.filter(
                color__isnull=True).values_list('id', flat=True)
            for id_batch in chunked(uncolored_ids, 100):
                _migrate_collection_colors.delay(id_batch, kind)
Ejemplo n.º 3
0
    def handle(self, *args, **options):
        """Dispatch color-migration tasks for uncolored feed items."""
        uncolored_apps = FeedApp.objects.filter(color__isnull=True)
        uncolored_colls = FeedCollection.objects.filter(color__isnull=True)

        for batch in chunked(uncolored_apps.values_list('id', flat=True),
                             100):
            _migrate_collection_colors.delay(batch, 'app')

        for batch in chunked(uncolored_colls.values_list('id', flat=True),
                             100):
            _migrate_collection_colors.delay(batch, 'collection')
    def handle(self, *args, **options):
        """Queue fixes for developer version notes on reviewer comments.

        Collects ids of all reviewer-comment communication notes and
        dispatches `_fix_developer_version_notes` per 100-id chunk.
        """
        ids = (CommunicationNote.objects
                                .filter(note_type=comm.REVIEWER_COMMENT)
                                .values_list('id', flat=True))

        for log_chunk in chunked(ids, 100):
            # Bug fix: previously passed the full `ids` queryset to every
            # task, so all notes were re-processed once per chunk.
            _fix_developer_version_notes.delay(log_chunk)
    def handle(self, *args, **kwargs):
        """Queue translation updates for every translatable field we track.

        Collects the translation ids from each (model, field) pair in
        order, drops empty values, and dispatches `update_translations`
        per 100-id chunk.
        """
        sources = (
            (FeedCollectionMembership, 'group'),
            (FeedCollection, 'name'),
            (FeedCollection, 'description'),
            (FeedShelfMembership, 'group'),
            (FeedShelf, 'description'),
            (FeedShelf, 'name'),
            (FeedApp, 'description'),
            (FeedApp, 'pullquote_text'),
            (Version, 'releasenotes'),
            (Webapp, 'description'),
            (Webapp, 'privacy_policy'),
            (Geodata, 'banner_message'),
        )

        ids = []
        for model, field in sources:
            ids.extend(model.objects.values_list(field, flat=True))

        # Filter out any None's.
        ids = filter(None, ids)

        for chunk in chunked(ids, 100):
            update_translations.delay(chunk)
Ejemplo n.º 6
0
def dump_user_installs_cron():
    """
    Sets up tasks to do user install dumps.

    Clears leftover per-user JSON dump files, then fans out
    `dump_user_installs` subtasks over chunks of user ids, with `zip_users`
    run once as the chord callback when all chunks complete.
    """
    chunk_size = 100
    # Get valid users to dump. Only users who opted into recommendations;
    # set() dedupes user ids appearing across multiple installs.
    user_ids = set(Installed.objects.filter(user__enable_recommendations=True)
                   .values_list('user', flat=True))

    # Clean up the path where we'll store the individual json files from each
    # user installs dump (which are in users/ in DUMPED_USERS_PATH).
    path_to_cleanup = os.path.join(settings.DUMPED_USERS_PATH, 'users')
    task_log.info('Cleaning up path {0}'.format(path_to_cleanup))
    try:
        for dirpath, dirnames, filenames in walk_storage(
                path_to_cleanup, storage=private_storage):
            for filename in filenames:
                private_storage.delete(os.path.join(dirpath, filename))
    except OSError:
        # Ignore if the directory does not exist.
        pass

    grouping = []
    for chunk in chunked(user_ids, chunk_size):
        grouping.append(dump_user_installs.subtask(args=[chunk]))

    # immutable=True: zip_users ignores the chord's collected results.
    post = zip_users.subtask(immutable=True)
    ts = chord(grouping, post)
    ts.apply_async()
Ejemplo n.º 7
0
def dump_user_installs_cron():
    """
    Sets up tasks to do user install dumps.

    Deletes stale per-user dump files from private storage, then queues a
    `dump_user_installs` task per chunk of user ids and a final `zip_users`
    chord callback.
    """
    chunk_size = 100
    # Get valid users to dump. Restricted to users with recommendations
    # enabled; set() removes duplicate ids.
    user_ids = set(
        Installed.objects.filter(
            user__enable_recommendations=True).values_list('user', flat=True))

    # Clean up the path where we'll store the individual json files from each
    # user installs dump (which are in users/ in DUMPED_USERS_PATH).
    path_to_cleanup = os.path.join(settings.DUMPED_USERS_PATH, 'users')
    task_log.info('Cleaning up path {0}'.format(path_to_cleanup))
    try:
        for dirpath, dirnames, filenames in walk_storage(
                path_to_cleanup, storage=private_storage):
            for filename in filenames:
                private_storage.delete(os.path.join(dirpath, filename))
    except OSError:
        # Ignore if the directory does not exist.
        pass

    grouping = []
    for chunk in chunked(user_ids, chunk_size):
        grouping.append(dump_user_installs.subtask(args=[chunk]))

    # immutable=True: the callback doesn't receive the chord results.
    post = zip_users.subtask(immutable=True)
    ts = chord(grouping, post)
    ts.apply_async()
def run():
    """Queue image-asset generation for every app, best-effort."""
    all_apps = Webapp.objects.all()
    for batch in chunked(all_apps, 50):
        for webapp in batch:
            try:
                generate_image_assets.delay(webapp)
            except Exception:
                # Best-effort migration: skip apps that fail to queue.
                pass
Ejemplo n.º 9
0
def convert(directory, delete=False):
    """Resize every source icon under `directory` into per-size PNGs.

    Walks storage under `directory`, skipping files that are already
    resized outputs (suffix in `size_suffixes`), files without an icon
    extension, and empty files. Each remaining icon is resized into
    `<name><size_suffix>.png` for every configured size. Apps whose
    source icon was not a PNG get icon_type bulk-updated at the end.

    When `delete` is True the original source file is removed after
    conversion.
    """
    print 'Converting icons in %s' % directory

    pks = []  # basenames of converted non-PNG icons, used as Webapp pks
    k = 0  # progress counter for periodic output
    for path, names, filenames in walk_storage(directory):
        for filename in filenames:
            old = os.path.join(path, filename)
            pre, ext = os.path.splitext(old)
            # Skip resized outputs from previous runs and non-icon files.
            if (pre[-3:] in size_suffixes or ext not in extensions):
                continue

            if not storage.size(old):
                print 'Icon %s is empty, ignoring.' % old
                continue

            for size, size_suffix in zip(sizes, size_suffixes):
                new = '%s%s%s' % (pre, size_suffix, '.png')
                # Don't redo work if the output already exists.
                if os.path.exists(new):
                    continue
                resize_image(old, new, (size, size), remove_src=False)

            # Non-PNG sources need their DB icon_type switched to PNG; the
            # basename is assumed to be the app's pk — TODO confirm.
            if ext != '.png':
                pks.append(os.path.basename(pre))

            if delete:
                storage.delete(old)

            k += 1
            if not k % 1000:
                print "... converted %s" % k

    for chunk in chunked(pks, 100):
        Webapp.objects.filter(pk__in=chunk).update(icon_type='image/png')
Ejemplo n.º 10
0
    def handle(self, *args, **options):
        """Run a named maintenance task over all webapps in celery chunks.

        Looks up `options['task']` in the module-level `tasks` registry.
        A registry entry may define:
          - 'qs': filter args applied to the Webapp queryset,
          - 'pre': an in-process callable applied to the pks first,
          - 'kwargs': extra kwargs passed to each subtask,
          - 'method': the celery task run per 100-pk chunk,
          - 'post': a final task chained after all chunks via a chord.

        Raises CommandError for an unknown task name.
        """
        task = tasks.get(options.get('task'))
        if not task:
            raise CommandError('Unknown task provided. Options are: %s'
                               % ', '.join(tasks.keys()))
        qs = Webapp.objects.all()
        if 'qs' in task:
            qs = qs.filter(*task['qs'])
        pks = qs.values_list('pk', flat=True).order_by('-last_updated')
        if 'pre' in task:
            # This is run in process to ensure its run before the tasks.
            pks = task['pre'](pks)
        if pks:
            kw = task.get('kwargs', {})
            # All the remaining tasks go in one group.
            grouping = []
            for chunk in chunked(pks, 100):
                grouping.append(
                    task['method'].subtask(args=[chunk], kwargs=kw))

            # Add the post task on to the end.
            post = None
            if 'post' in task:
                # immutable=True: the post task ignores the chord results.
                post = task['post'].subtask(args=[], kwargs=kw, immutable=True)
                ts = chord(grouping, post)
            else:
                ts = group(grouping)
            ts.apply_async()
Ejemplo n.º 11
0
    def handle(self, *args, **options):
        """Dispatch a registered bulk task over every webapp, chunked.

        `options['task']` selects an entry from the module-level `tasks`
        dict; the entry's optional 'qs' filters the queryset, 'pre' runs
        in-process over the pks, 'kwargs' feeds each subtask, 'method' is
        the per-chunk celery task, and 'post' (if present) runs once after
        all chunks via a chord.
        """
        task = tasks.get(options.get('task'))
        if not task:
            raise CommandError('Unknown task provided. Options are: %s'
                               % ', '.join(tasks.keys()))
        qs = Webapp.objects.all()
        if 'qs' in task:
            qs = qs.filter(*task['qs'])
        pks = qs.values_list('pk', flat=True).order_by('-last_updated')
        if 'pre' in task:
            # This is run in process to ensure its run before the tasks.
            pks = task['pre'](pks)
        if pks:
            kw = task.get('kwargs', {})
            # All the remaining tasks go in one group.
            grouping = []
            for chunk in chunked(pks, 100):
                grouping.append(
                    task['method'].subtask(args=[chunk], kwargs=kw))

            # Add the post task on to the end.
            post = None
            if 'post' in task:
                # immutable=True so the post task ignores chord results.
                post = task['post'].subtask(args=[], kwargs=kw, immutable=True)
                ts = chord(grouping, post)
            else:
                ts = group(grouping)
            ts.apply_async()
def run():
    """Delete duplicate image assets, keeping the first per (app, slug)."""
    for app_batch in chunked(Webapp.objects.all(), 50):
        for webapp in app_batch:
            for size_slug in SIZE_SLUGS:
                matching = ImageAsset.objects.filter(addon=webapp,
                                                     slug=size_slug)
                for extra in matching[1:]:
                    extra.delete()
Ejemplo n.º 13
0
    def handle(self, *args, **kwargs):
        """Queue region-exclusion fixes for apps that are not restricted.

        When an `app` pk is supplied, restrict the run to that app only.
        """
        unrestricted = Webapp.objects.filter(_geodata__restricted=False)

        app_pk = kwargs['app']
        if app_pk:
            unrestricted = unrestricted.filter(pk=app_pk)

        id_qs = unrestricted.values_list('id', flat=True)
        for id_batch in chunked(id_qs, 100):
            fix_excluded_regions.delay(id_batch)
Ejemplo n.º 14
0
def email_devs(request):
    """Admin view: bulk-email developers selected by DevMailerForm.

    GET renders the form (plus a CSV link when a previous preview exists).
    On valid POST, filters AddonUser rows by the chosen recipient group and
    queues `tasks.admin_email` per 100 unique addresses; `preview_only`
    stores the emails for preview instead of sending.
    """
    form = DevMailerForm(request.POST or None)
    preview = EmailPreviewTopic(topic='email-devs')
    if preview.filter().count():
        preview_csv = reverse('zadmin.email_preview_csv', args=[preview.topic])
    else:
        preview_csv = None
    if request.method == 'POST' and form.is_valid():
        data = form.cleaned_data
        # Base set: dev/owner roles whose user has an email address.
        qs = (AddonUser.objects.filter(
            role__in=(mkt.AUTHOR_ROLE_DEV,
                      mkt.AUTHOR_ROLE_OWNER)).exclude(user__email=None))

        if data['recipients'] in ('payments', 'desktop_apps'):
            qs = qs.exclude(addon__status=mkt.STATUS_DELETED)
        else:
            qs = qs.filter(addon__status__in=mkt.LISTED_STATUSES)

        if data['recipients'] in ('payments', 'payments_region_enabled',
                                  'payments_region_disabled'):
            # Paid apps only for the payments groups.
            qs = qs.exclude(addon__premium_type__in=(mkt.ADDON_FREE,
                                                     mkt.ADDON_OTHER_INAPP))
            if data['recipients'] == 'payments_region_enabled':
                qs = qs.filter(addon__enable_new_regions=True)
            elif data['recipients'] == 'payments_region_disabled':
                qs = qs.filter(addon__enable_new_regions=False)
        elif data['recipients'] in ('apps', 'free_apps_region_enabled',
                                    'free_apps_region_disabled'):
            if data['recipients'] == 'free_apps_region_enabled':
                qs = qs.filter(addon__enable_new_regions=True)
            elif data['recipients'] == 'free_apps_region_disabled':
                qs = qs.filter(addon__enable_new_regions=False)
        elif data['recipients'] == 'desktop_apps':
            qs = (qs.filter(
                addon__addondevicetype__device_type=mkt.DEVICE_DESKTOP.id))
        else:
            raise NotImplementedError('If you want to support emailing other '
                                      'types of developers, do it here!')
        if data['preview_only']:
            # Clear out the last batch of previewed emails.
            preview.filter().delete()
        total = 0
        # set() dedupes addresses shared across multiple add-ons.
        for emails in chunked(set(qs.values_list('user__email', flat=True)),
                              100):
            total += len(emails)
            tasks.admin_email.delay(emails,
                                    data['subject'],
                                    data['message'],
                                    preview_only=data['preview_only'],
                                    preview_topic=preview.topic)
        msg = 'Emails queued for delivery: %s' % total
        if data['preview_only']:
            msg = '%s (for preview only, emails not sent!)' % msg
        messages.success(request, msg)
        return redirect('zadmin.email_devs')
    return render(request, 'zadmin/email-devs.html',
                  dict(form=form, preview_csv=preview_csv))
Ejemplo n.º 15
0
    def handle(self, *args, **options):
        """Queue migration of review-queue activity log entries.

        Gathers ActivityLog ids referenced by AppLog rows whose action is
        in `mkt.LOG_REVIEW_QUEUE` and dispatches `_migrate_activity_log`
        per 100-id chunk, oldest first.
        """
        applog_ids = AppLog.objects.values_list('activity_log', flat=True)

        ids = (ActivityLog.objects.filter(
            pk__in=list(applog_ids), action__in=mkt.LOG_REVIEW_QUEUE)
            .order_by('created').values_list('id', flat=True))

        for log_chunk in chunked(ids, 100):
            # Bug fix: each task previously received the full `ids` list
            # instead of its chunk, migrating everything once per chunk.
            _migrate_activity_log.delay(log_chunk)
Ejemplo n.º 16
0
def email_devs(request):
    """Bulk-email app developers chosen via DevMailerForm (admin view).

    Builds an AddonUser queryset from the selected recipient group, then
    queues `tasks.admin_email` for each chunk of 100 unique addresses.
    With `preview_only` set, previous previews are cleared and the new
    batch is only stored for preview.
    """
    form = DevMailerForm(request.POST or None)
    preview = EmailPreviewTopic(topic='email-devs')
    if preview.filter().count():
        preview_csv = reverse('zadmin.email_preview_csv',
                              args=[preview.topic])
    else:
        preview_csv = None
    if request.method == 'POST' and form.is_valid():
        data = form.cleaned_data
        # Base set: developers and owners with a known email address.
        qs = (AddonUser.objects.filter(role__in=(mkt.AUTHOR_ROLE_DEV,
                                                 mkt.AUTHOR_ROLE_OWNER))
                               .exclude(user__email=None))

        if data['recipients'] in ('payments', 'desktop_apps'):
            qs = qs.exclude(addon__status=mkt.STATUS_DELETED)
        else:
            qs = qs.filter(addon__status__in=mkt.LISTED_STATUSES)

        if data['recipients'] in ('payments', 'payments_region_enabled',
                                  'payments_region_disabled'):
            # Payments groups exclude free / other-in-app apps.
            qs = qs.exclude(addon__premium_type__in=(mkt.ADDON_FREE,
                                                     mkt.ADDON_OTHER_INAPP))
            if data['recipients'] == 'payments_region_enabled':
                qs = qs.filter(addon__enable_new_regions=True)
            elif data['recipients'] == 'payments_region_disabled':
                qs = qs.filter(addon__enable_new_regions=False)
        elif data['recipients'] in ('apps', 'free_apps_region_enabled',
                                    'free_apps_region_disabled'):
            if data['recipients'] == 'free_apps_region_enabled':
                qs = qs.filter(addon__enable_new_regions=True)
            elif data['recipients'] == 'free_apps_region_disabled':
                qs = qs.filter(addon__enable_new_regions=False)
        elif data['recipients'] == 'desktop_apps':
            qs = (qs.filter(
                addon__addondevicetype__device_type=mkt.DEVICE_DESKTOP.id))
        else:
            raise NotImplementedError('If you want to support emailing other '
                                      'types of developers, do it here!')
        if data['preview_only']:
            # Clear out the last batch of previewed emails.
            preview.filter().delete()
        total = 0
        # set() removes duplicate addresses across add-ons.
        for emails in chunked(set(qs.values_list('user__email', flat=True)),
                              100):
            total += len(emails)
            tasks.admin_email.delay(emails, data['subject'], data['message'],
                                    preview_only=data['preview_only'],
                                    preview_topic=preview.topic)
        msg = 'Emails queued for delivery: %s' % total
        if data['preview_only']:
            msg = '%s (for preview only, emails not sent!)' % msg
        messages.success(request, msg)
        return redirect('zadmin.email_devs')
    return render(request, 'zadmin/email-devs.html',
                  dict(form=form, preview_csv=preview_csv))
Ejemplo n.º 17
0
    def handle(self, *args, **options):
        """Queue migration of review-queue activity logs, 100 ids at a time.

        Only ActivityLog rows referenced from AppLog and whose action is in
        `mkt.LOG_REVIEW_QUEUE` are migrated, in creation order.
        """
        applog_ids = AppLog.objects.values_list('activity_log', flat=True)

        ids = (ActivityLog.objects.filter(
            pk__in=list(applog_ids),
            action__in=mkt.LOG_REVIEW_QUEUE).order_by('created').values_list(
                'id', flat=True))

        for log_chunk in chunked(ids, 100):
            # Bug fix: previously dispatched the full `ids` queryset for
            # every chunk instead of the chunk itself.
            _migrate_activity_log.delay(log_chunk)
Ejemplo n.º 18
0
    def handle(self, *args, **kw):
        """Queue IARC rating refreshes for apps with IARC info.

        Pass `apps` as a comma-separated id list to limit the run.
        """
        from mkt.webapps.models import Webapp

        # Get apps.
        qs = Webapp.objects.filter(iarc_info__isnull=False)
        raw_ids = kw.get("apps")
        if raw_ids:
            wanted = [int(token.strip()) for token in raw_ids.split(",")]
            qs = qs.filter(id__in=wanted)

        for id_batch in chunked(qs.values_list("id", flat=True), 100):
            refresh_iarc_ratings.delay(id_batch)
Ejemplo n.º 19
0
    def handle(self, *args, **kw):
        """Refresh IARC ratings, optionally limited via the `apps` option."""
        from mkt.webapps.models import Webapp

        # Start from every app that has IARC info on record.
        candidates = Webapp.objects.filter(iarc_info__isnull=False)
        id_csv = kw.get('apps')
        if id_csv:
            candidates = candidates.filter(
                id__in=[int(part.strip()) for part in id_csv.split(',')])

        for batch in chunked(candidates.values_list('id', flat=True), 100):
            refresh_iarc_ratings.delay(batch)
Ejemplo n.º 20
0
def update_supported_locales(ids, **kw):
    """
    Task intended to run via command line to update all apps' supported locales
    based on the current version.
    """
    for id_batch in chunked(ids, 50):
        for webapp in Webapp.objects.filter(id__in=id_batch):
            try:
                if webapp.update_supported_locales():
                    _log(webapp, u'Updated supported locales')
            except Exception:
                # Best-effort: log the failure and keep going.
                _log(webapp, u'Updating supported locales failed.',
                     exc_info=True)
Ejemplo n.º 21
0
def update_supported_locales(ids, **kw):
    """
    Task intended to run via command line to update all apps' supported locales
    based on the current version.

    Processes `ids` 50 at a time; each app's failure is logged with the
    traceback and does not stop the run.
    """
    for chunk in chunked(ids, 50):
        for app in Webapp.objects.filter(id__in=chunk):
            try:
                # Only log when update_supported_locales() reports a change.
                if app.update_supported_locales():
                    _log(app, u'Updated supported locales')
            except Exception:
                _log(app, u'Updating supported locales failed.', exc_info=True)
Ejemplo n.º 22
0
def manifest_revalidation(request):
    """Admin view: queue manifest revalidation for public hosted apps."""
    if request.method == 'POST':
        # Collect the apps to revalidate: hosted, public, not user-disabled.
        criteria = Q(is_packaged=False, status=mkt.STATUS_PUBLIC,
                     disabled_by_user=False)
        pk_qs = Webapp.objects.filter(criteria).values_list('pk', flat=True)

        for pk_batch in chunked(pk_qs, 100):
            update_manifests.delay(list(pk_batch), check_hash=False)

        messages.success(request, 'Manifest revalidation queued')

    return render(request, 'zadmin/manifest.html')
Ejemplo n.º 23
0
def manifest_revalidation(request):
    """On POST, queue manifest revalidation for eligible hosted apps."""
    if request.method == 'POST':
        # Eligible: non-packaged, public, and not disabled by the developer.
        filters = Q(is_packaged=False,
                    status=mkt.STATUS_PUBLIC,
                    disabled_by_user=False)
        pks = Webapp.objects.filter(filters).values_list('pk', flat=True)

        for batch in chunked(pks, 100):
            update_manifests.delay(list(batch), check_hash=False)

        messages.success(request, 'Manifest revalidation queued')

    return render(request, 'zadmin/manifest.html')
    def handle(self, *args, **options):
        """Backfill Webapp.solitude_public_id from solitude generic products.

        Scans apps that have payment accounts, 50 at a time; with
        `dry_run` the found public_ids are only printed, nothing is saved.
        """
        webapps = (Webapp.objects.filter(app_payment_accounts__isnull=False).
                   no_transforms().select_related('app_payment_accounts'))
        for chunk in chunked(webapps, 50):
            for app in chunk:
                generic_product = get_generic_product(app)
                # Apps without a matching generic product are skipped.
                if not generic_product:
                    continue
                print 'Found public_id', generic_product['public_id']

                if not options['dry_run']:
                    print 'Saving app', app
                    app.solitude_public_id = generic_product['public_id']
                    app.save()
Ejemplo n.º 25
0
    def handle(self, *args, **kw):
        """Queue IARC rating refreshes, honoring the v2 waffle switch."""
        from mkt.webapps.models import Webapp

        # Under IARC v2 the cert field marks rated apps; otherwise the
        # legacy iarc_info relation does.
        if waffle.switch_is_active('iarc-upgrade-v2'):
            rated = Webapp.objects.filter(iarc_cert__isnull=False)
        else:
            rated = Webapp.objects.filter(iarc_info__isnull=False)

        csv_ids = kw.get('apps')
        if csv_ids:
            rated = rated.filter(
                id__in=[int(piece.strip()) for piece in csv_ids.split(',')])

        for batch in chunked(rated.values_list('id', flat=True), 100):
            refresh_iarc_ratings.delay(batch)
    def handle(self, *args, **options):
        """Store solitude generic-product public_ids on their webapps.

        Iterates apps with payment accounts in chunks of 50; `dry_run`
        prints the found ids without saving.
        """
        webapps = (Webapp.objects.filter(app_payment_accounts__isnull=False)
                                 .no_transforms()
                                 .select_related('app_payment_accounts'))
        for chunk in chunked(webapps, 50):
            for app in chunk:
                generic_product = get_generic_product(app)
                # Skip apps with no generic product in solitude.
                if not generic_product:
                    continue
                print 'Found public_id', generic_product['public_id']

                if not options['dry_run']:
                    print 'Saving app', app
                    app.solitude_public_id = generic_product['public_id']
                    app.save()
Ejemplo n.º 27
0
def mkt_gc(**kw):
    """Site-wide garbage collections.

    Deletes old activity logs (except kept actions), expired oauth nonces,
    stale dump tarballs, old TMP_PATH files and stale FileUploads.
    """
    log.info('Collecting data to delete')
    # Activity logs older than 90 days, excluding actions we must retain.
    logs = (ActivityLog.objects.filter(created__lt=days_ago(90)).exclude(
        action__in=mkt.LOG_KEEP).values_list('id', flat=True))

    for chunk in chunked(logs, 100):
        chunk.sort()
        log.info('Deleting log entries: %s' % str(chunk))
        delete_logs.delay(chunk)

    # Clear oauth nonce rows. These expire after 10 minutes but we're just
    # clearing those that are more than 1 day old.
    Nonce.objects.filter(created__lt=days_ago(1)).delete()

    # Delete the dump apps over 30 days.
    _remove_stale_files(os.path.join(settings.DUMPED_APPS_PATH, 'tarballs'),
                        settings.DUMPED_APPS_DAYS_DELETE,
                        'Deleting old tarball: {0}',
                        storage=public_storage)

    # Delete the dumped user installs over 30 days. Those are using private
    # storage.
    _remove_stale_files(os.path.join(settings.DUMPED_USERS_PATH, 'tarballs'),
                        settings.DUMPED_USERS_DAYS_DELETE,
                        'Deleting old tarball: {0}',
                        storage=private_storage)

    # Delete old files in select directories under TMP_PATH.
    _remove_stale_files(os.path.join(settings.TMP_PATH, 'preview'),
                        settings.TMP_PATH_DAYS_DELETE,
                        'Deleting TMP_PATH file: {0}',
                        storage=private_storage)
    _remove_stale_files(os.path.join(settings.TMP_PATH, 'icon'),
                        settings.TMP_PATH_DAYS_DELETE,
                        'Deleting TMP_PATH file: {0}',
                        storage=private_storage)

    # Delete stale FileUploads.
    for fu in FileUpload.objects.filter(created__lte=days_ago(90)):
        log.debug(u'[FileUpload:{uuid}] Removing file: {path}'.format(
            uuid=fu.uuid, path=fu.path))
        if fu.path:
            try:
                private_storage.delete(fu.path)
            except OSError:
                # File already gone; removing the DB row is enough.
                pass
        fu.delete()
Ejemplo n.º 28
0
def mkt_gc(**kw):
    """Site-wide garbage collections.

    Removes aged activity logs (minus kept actions), day-old oauth nonces,
    stale dump tarballs, old TMP_PATH previews/icons and stale FileUploads.
    """
    log.info('Collecting data to delete')
    # Logs older than 90 days, except actions listed in LOG_KEEP.
    logs = (ActivityLog.objects.filter(created__lt=days_ago(90))
            .exclude(action__in=mkt.LOG_KEEP).values_list('id', flat=True))

    for chunk in chunked(logs, 100):
        chunk.sort()
        log.info('Deleting log entries: %s' % str(chunk))
        delete_logs.delay(chunk)

    # Clear oauth nonce rows. These expire after 10 minutes but we're just
    # clearing those that are more than 1 day old.
    Nonce.objects.filter(created__lt=days_ago(1)).delete()

    # Delete the dump apps over 30 days.
    _remove_stale_files(os.path.join(settings.DUMPED_APPS_PATH, 'tarballs'),
                        settings.DUMPED_APPS_DAYS_DELETE,
                        'Deleting old tarball: {0}',
                        storage=public_storage)

    # Delete the dumped user installs over 30 days. Those are using private
    # storage.
    _remove_stale_files(os.path.join(settings.DUMPED_USERS_PATH, 'tarballs'),
                        settings.DUMPED_USERS_DAYS_DELETE,
                        'Deleting old tarball: {0}',
                        storage=private_storage)

    # Delete old files in select directories under TMP_PATH.
    _remove_stale_files(os.path.join(settings.TMP_PATH, 'preview'),
                        settings.TMP_PATH_DAYS_DELETE,
                        'Deleting TMP_PATH file: {0}',
                        storage=private_storage)
    _remove_stale_files(os.path.join(settings.TMP_PATH, 'icon'),
                        settings.TMP_PATH_DAYS_DELETE,
                        'Deleting TMP_PATH file: {0}',
                        storage=private_storage)

    # Delete stale FileUploads.
    for fu in FileUpload.objects.filter(created__lte=days_ago(90)):
        log.debug(u'[FileUpload:{uuid}] Removing file: {path}'
                  .format(uuid=fu.uuid, path=fu.path))
        if fu.path:
            try:
                private_storage.delete(fu.path)
            except OSError:
                # Already removed from storage; the row delete still runs.
                pass
        fu.delete()
Ejemplo n.º 29
0
def hide_disabled_files():
    """Move files of disabled add-ons out of public visibility.

    If an add-on or a file is disabled, it should be moved to
    GUARDED_ADDONS_PATH so it's not publicly visible. Deleted versions are
    skipped since their files are hidden at deletion time (and due to
    bug 980916).
    """
    disabled = (Q(status=mkt.STATUS_DISABLED) |
                Q(version__addon__status=mkt.STATUS_DISABLED) |
                Q(version__addon__disabled_by_user=True))
    file_ids = (File.objects.filter(version__deleted=False)
                .filter(disabled)
                .values_list('id', flat=True))
    for id_batch in chunked(file_ids, 300):
        batch_qs = (File.objects.filter(id__in=id_batch)
                    .select_related('version'))
        for disabled_file in batch_qs:
            disabled_file.hide_disabled_file()
Ejemplo n.º 30
0
def hide_disabled_files():
    """Hide files belonging to disabled files/add-ons from public storage."""
    # If an add-on or a file is disabled, it should be moved to
    # GUARDED_ADDONS_PATH so it's not publicly visible.
    #
    # We ignore deleted versions since we hide those files when deleted and
    # also due to bug 980916.
    ids = (File.objects
           .filter(version__deleted=False)
           .filter(Q(status=mkt.STATUS_DISABLED) |
                   Q(version__addon__status=mkt.STATUS_DISABLED) |
                   Q(version__addon__disabled_by_user=True))
           .values_list('id', flat=True))
    for chunk in chunked(ids, 300):
        qs = File.objects.filter(id__in=chunk)
        # select_related avoids one version query per file below.
        qs = qs.select_related('version')
        for f in qs:
            f.hide_disabled_file()
def run():
    """
    Retroactively award theme reviewer points for all the theme
    reviewers done since the Great Theme Migration to amo up to
    when we started recording points.
    """
    cutoff = datetime.date(2013, 8, 27)

    # Match approve/reject actions serialized inside the _details JSON of
    # theme-review log entries created before the cutoff.
    approve_marker = '"action": %s' % rvw.ACTION_APPROVE
    reject_marker = '"action": %s' % rvw.ACTION_REJECT
    reviews = ActivityLog.objects.filter(
        Q(_details__contains=approve_marker) |
        Q(_details__contains=reject_marker),
        action=mkt.LOG.THEME_REVIEW.id, created__lte=cutoff)

    for review_batch in chunked(reviews, 50):
        # Review and thou shall receive.
        _batch_award_points.delay(review_batch)
Ejemplo n.º 32
0
def dump_user_installs_cron():
    """
    Sets up tasks to do user install dumps.
    """
    batch_size = 100
    # Only users who enabled recommendations are dumped; set() dedupes ids.
    recommending = Installed.objects.filter(user__enable_recommendations=True)
    user_ids = set(recommending.values_list('user', flat=True))

    # Remove old dump data before running.
    user_dir = os.path.join(settings.DUMPED_USERS_PATH, 'users')
    if os.path.exists(user_dir):
        shutil.rmtree(user_dir)

    subtasks = [dump_user_installs.subtask(args=[batch])
                for batch in chunked(user_ids, batch_size)]

    # zip_users runs once after every dump subtask finishes; immutable so
    # it ignores their results.
    finisher = zip_users.subtask(immutable=True)
    chord(subtasks, finisher).apply_async()
Ejemplo n.º 33
0
def dump_user_installs_cron():
    """
    Sets up tasks to do user install dumps.

    Wipes the previous per-user dump directory on local disk, then fans
    out `dump_user_installs` tasks over user-id chunks with a `zip_users`
    chord callback at the end.
    """
    chunk_size = 100
    # Get valid users to dump. Limited to users with recommendations
    # enabled; set() removes duplicate ids across installs.
    user_ids = set(
        Installed.objects.filter(
            user__enable_recommendations=True).values_list('user', flat=True))

    # Remove old dump data before running.
    user_dir = os.path.join(settings.DUMPED_USERS_PATH, 'users')
    if os.path.exists(user_dir):
        shutil.rmtree(user_dir)

    grouping = []
    for chunk in chunked(user_ids, chunk_size):
        grouping.append(dump_user_installs.subtask(args=[chunk]))

    # immutable=True: zip_users doesn't receive the chord results.
    post = zip_users.subtask(immutable=True)
    ts = chord(grouping, post)
    ts.apply_async()
Ejemplo n.º 34
0
def chunk_indexing(indexer, chunk_size):
    """Chunk the items to index.

    Returns a (chunks, total) pair: an iterable of id chunks of size
    `chunk_size`, plus the total number of indexable ids.
    """
    indexable_ids = list(indexer.get_indexable().values_list('id', flat=True))
    total = len(indexable_ids)
    return chunked(indexable_ids, chunk_size), total
Ejemplo n.º 35
0
def _region_exclude(ids, region_ids):
    """Fan out region_exclude tasks over 100-id chunks and fire them."""
    subtasks = [region_exclude.subtask(args=[id_chunk, region_ids])
                for id_chunk in chunked(ids, 100)]
    TaskSet(subtasks).apply_async()
Ejemplo n.º 36
0
def add_uuids(ids, **kw):
    """Re-save the given apps so each one gets a guid if it lacks one."""
    for id_batch in chunked(ids, 50):
        # Save triggers the creation of a guid if the app doesn't currently
        # have one.
        for webapp in Webapp.objects.filter(id__in=id_batch):
            webapp.save()
Ejemplo n.º 37
0
def chunk_indexing(indexer, chunk_size):
    """Chunk the items to index; return (chunk iterator, item count)."""
    ids = indexer.get_indexable().values_list('id', flat=True)
    ids = list(ids)
    return chunked(ids, chunk_size), len(ids)
Ejemplo n.º 38
0
def dump_all_apps_tasks():
    """Build immutable dump_apps signatures covering every visible app."""
    visible_pks = (Webapp.objects.visible()
                   .values_list('pk', flat=True)
                   .order_by('pk'))
    signatures = []
    for pk_batch in chunked(visible_pks, 100):
        signatures.append(dump_apps.si(pk_batch))
    return signatures
Ejemplo n.º 39
0
def run():
    """Fix app ratings in ES (bug 787162).

    Queues a `reindex_reviews` task for every app with at least one
    review, 50 ids at a time.
    """
    ids = (Webapp.objects.filter(total_reviews__gt=0)
           .values_list('id', flat=True))
    for chunk in chunked(ids, 50):
        # Plain loop: the old list comprehension ran purely for side
        # effects and built a throwaway list of task results.
        for pk in chunk:
            reindex_reviews.delay(pk)
Ejemplo n.º 40
0
    def handle(self, *args, **options):
        """Queue migration of approval notes, 100 versions at a time.

        Covers every Version that has approval notes set.
        """
        ids = Version.objects.filter(approvalnotes__isnull=False)

        for log_chunk in chunked(ids, 100):
            # Bug fix: previously passed the full queryset for every chunk,
            # re-migrating all versions once per chunk.
            _migrate_approval_notes.delay(log_chunk)
Ejemplo n.º 41
0
def dump_all_apps_tasks():
    """Return dump_apps immutable subtasks for all visible apps, by pk."""
    pks = Webapp.objects.visible().order_by('pk').values_list('pk', flat=True)
    return [dump_apps.si(batch) for batch in chunked(pks, 100)]
Ejemplo n.º 42
0
def update_app_trending():
    """
    Update trending for all published apps.

    We break these into chunks so we can bulk index them. Each chunk will
    process the apps in it and reindex them in bulk. After all the chunks are
    processed we find records that haven't been updated and purge/reindex those
    so we nullify their values.

    """
    chunk_size = 100

    # Only public, developer-enabled apps get trending scores.
    ids = list(Webapp.objects.filter(status=mkt.STATUS_PUBLIC,
                                     disabled_by_user=False)
                     .values_list('id', flat=True))

    for chunk in chunked(ids, chunk_size):

        # Per-chunk stats: processed count, per-app timings, and the ids
        # whose trending changed and therefore need reindexing.
        count = 0
        times = []
        reindex_ids = []

        for app in Webapp.objects.filter(id__in=chunk).no_transforms():

            reindex = False
            count += 1
            now = datetime.now()
            t_start = time.time()

            scores = _get_trending(app.id)

            # Update global trending, then per-region trending below.
            # Default the score to 0 so a missing key reads as
            # "not trending" rather than comparing None.
            value = scores.get('all', 0)
            if value > 0:
                reindex = True
                trending, created = app.trending.get_or_create(
                    region=0, defaults={'value': value})
                if not created:
                    trending.update(value=value, modified=now)
            else:
                # The value is <= 0 so the app is not trending. Let's remove it
                # from the trending table.
                app.trending.filter(region=0).delete()

            for region in mkt.regions.REGIONS_DICT.values():
                value = scores.get(region.slug, 0)
                if value > 0:
                    reindex = True
                    trending, created = app.trending.get_or_create(
                        region=region.id, defaults={'value': value})
                    if not created:
                        trending.update(value=value, modified=now)
                else:
                    # The value is <= 0 so the app is not trending.
                    # Let's remove it from the trending table.
                    app.trending.filter(region=region.id).delete()

            times.append(time.time() - t_start)

            if reindex:
                reindex_ids.append(app.id)

        # Now reindex the apps that actually have a trending score.
        if reindex_ids:
            WebappIndexer.run_indexing(reindex_ids)

        # Guard against ZeroDivisionError: the chunk's apps may all have
        # been deleted between the id query and the loop above.
        if count:
            log.info('Trending calculated for %s apps. Avg time overall: '
                     '%0.2fs' % (count, sum(times) / count))

    # Purge any records that were not updated.
    #
    # Note: We force update `modified` even if no data changes so any records
    # with older modified times can be purged.
    now = datetime.now()
    midnight = datetime(year=now.year, month=now.month, day=now.day)

    qs = Trending.objects.filter(modified__lte=midnight)
    # First get the IDs so we know what to reindex.
    purged_ids = qs.values_list('addon', flat=True).distinct()
    # Then delete them.
    qs.delete()

    for ids in chunked(purged_ids, chunk_size):
        WebappIndexer.run_indexing(ids)
Ejemplo n.º 43
0
def update_app_trending():
    """
    Update trending for all published apps.

    We break these into chunks so we can bulk index them. Each chunk will
    process the apps in it and reindex them in bulk. After all the chunks are
    processed we find records that haven't been updated and purge/reindex those
    so we nullify their values.

    """
    chunk_size = 100

    # Only public, developer-enabled apps get trending scores.
    ids = list(
        Webapp.objects.filter(status=mkt.STATUS_PUBLIC,
                              disabled_by_user=False).values_list('id',
                                                                  flat=True))

    for chunk in chunked(ids, chunk_size):

        # Per-chunk stats: processed count, per-app timings, and the ids
        # whose trending changed and therefore need reindexing.
        count = 0
        times = []
        reindex_ids = []

        for app in Webapp.objects.filter(id__in=chunk).no_transforms():

            reindex = False
            count += 1
            now = datetime.now()
            t_start = time.time()

            # NOTE(review): assumes _get_trending returns a mapping of
            # region slug -> score with an 'all' key for the global
            # score -- confirm against its definition.
            scores = _get_trending(app.id)

            # Update global trending, then per-region trending below.
            value = scores.get('all')
            if value > 0:
                reindex = True
                trending, created = app.trending.get_or_create(
                    region=0, defaults={'value': value})
                if not created:
                    trending.update(value=value, modified=now)
            else:
                # The value is <= 0 so the app is not trending. Let's remove it
                # from the trending table.
                app.trending.filter(region=0).delete()

            for region in mkt.regions.REGIONS_DICT.values():
                value = scores.get(region.slug)
                if value > 0:
                    reindex = True
                    trending, created = app.trending.get_or_create(
                        region=region.id, defaults={'value': value})
                    if not created:
                        trending.update(value=value, modified=now)
                else:
                    # The value is <= 0 so the app is not trending.
                    # Let's remove it from the trending table.
                    app.trending.filter(region=region.id).delete()

            times.append(time.time() - t_start)

            if reindex:
                reindex_ids.append(app.id)

        # Now reindex the apps that actually have a trending score.
        if reindex_ids:
            WebappIndexer.run_indexing(reindex_ids)

        # NOTE(review): divides by `count`; raises ZeroDivisionError if the
        # chunk's apps were all deleted between the id query and this loop.
        log.info('Trending calculated for %s apps. Avg time overall: '
                 '%0.2fs' % (count, sum(times) / count))

    # Purge any records that were not updated.
    #
    # Note: We force update `modified` even if no data changes so any records
    # with older modified times can be purged.
    now = datetime.now()
    midnight = datetime(year=now.year, month=now.month, day=now.day)

    qs = Trending.objects.filter(modified__lte=midnight)
    # First get the IDs so we know what to reindex.
    purged_ids = qs.values_list('addon', flat=True).distinct()
    # Then delete them.
    qs.delete()

    for ids in chunked(purged_ids, chunk_size):
        WebappIndexer.run_indexing(ids)
Ejemplo n.º 44
0
def _region_exclude(ids, region_ids):
    """Asynchronously queue region_exclude subtasks, 100 ids per task."""
    pending = [region_exclude.subtask(args=[batch, region_ids])
               for batch in chunked(ids, 100)]
    TaskSet(pending).apply_async()
Ejemplo n.º 45
0
def adjust_categories(ids, **kw):
    NEW_APP_CATEGORIES = {
        425986: ['weather'],
        444314: ['travel', 'weather'],
        445008: ['travel', 'weather'],
        450602: ['weather'],
        455256: ['weather'],
        455660: ['travel', 'weather'],
        459364: ['weather'],
        461279: ['social', 'weather'],
        461371: ['lifestyle', 'weather'],
        462257: ['utilities', 'weather'],
        463108: ['weather'],
        466698: ['utilities', 'weather'],
        468173: ['weather'],
        470946: ['travel', 'weather'],
        482869: ['utilities', 'weather'],
        482961: ['weather'],
        496946: ['weather'],
        499699: ['weather'],
        501553: ['weather'],
        501581: ['lifestyle', 'weather'],
        501583: ['social', 'weather'],
        502171: ['weather', 'photo-video'],
        502173: ['weather', 'photo-video'],
        502685: ['weather'],
        503765: ['weather'],
        505437: ['weather'],
        506317: ['weather'],
        506543: ['weather'],
        506553: ['weather'],
        506623: ['weather', 'travel'],
        507091: ['weather'],
        507139: ['weather'],
        509150: ['weather'],
        510118: ['weather', 'utilities'],
        510334: ['weather', 'travel'],
        510726: ['weather'],
        511364: ['weather', 'utilities'],
        424184: ['food-drink', 'health-fitness'],
        439994: ['food-drink'],
        442842: ['maps-navigation', 'food-drink'],
        444056: ['lifestyle', 'food-drink'],
        444070: ['lifestyle', 'food-drink'],
        444222: ['food-drink', 'health-fitness'],
        444694: ['lifestyle', 'food-drink'],
        454558: ['food-drink', 'travel'],
        455620: ['food-drink', 'entertainment'],
        459304: ['food-drink', 'health-fitness'],
        465445: ['shopping', 'food-drink'],
        465700: ['food-drink', 'books-comics'],
        467828: ['food-drink', 'education'],
        469104: ['food-drink'],
        470145: ['food-drink', 'health-fitness'],
        471349: ['lifestyle', 'food-drink'],
        476155: ['lifestyle', 'food-drink'],
        477015: ['food-drink', 'travel'],
        497282: ['food-drink', 'health-fitness'],
        500359: ['food-drink', 'books-comics'],
        501249: ['food-drink'],
        501573: ['food-drink', 'entertainment'],
        504143: ['health-fitness', 'food-drink'],
        506111: ['health-fitness', 'food-drink'],
        506691: ['health-fitness', 'food-drink'],
        507921: ['books-comics', 'food-drink'],
        508211: ['food-drink', 'lifestyle'],
        508215: ['food-drink', 'lifestyle'],
        508990: ['food-drink', 'games'],
        506369: ['books-comics', 'humor'],
        509746: ['entertainment', 'humor'],
        509848: ['entertainment', 'humor'],
        511390: ['entertainment', 'humor'],
        511504: ['entertainment', 'humor'],
        488424: ['internet', 'reference'],
        489052: ['social', 'internet'],
        499644: ['internet', 'utilities'],
        500651: ['reference', 'internet'],
        505043: ['utilities', 'internet'],
        505407: ['utilities', 'internet'],
        505949: ['internet', 'reference'],
        508828: ['utilities', 'internet'],
        508830: ['utilities', 'internet'],
        509160: ['productivity', 'internet'],
        509606: ['productivity', 'internet'],
        509722: ['productivity', 'internet'],
        510114: ['news', 'internet'],
        364752: ['games', 'kids'],
        364941: ['games', 'kids'],
        449560: ['entertainment', 'kids'],
        466557: ['education', 'kids'],
        466811: ['photo-video', 'kids'],
        473532: ['education', 'kids'],
        473620: ['education', 'kids'],
        473865: ['education', 'kids'],
        500527: ['games', 'kids'],
        502263: ['photo-video', 'kids'],
        507497: ['education', 'kids'],
        508089: ['education', 'kids'],
        508229: ['education', 'kids'],
        508239: ['education', 'kids'],
        508247: ['education', 'kids'],
        509404: ['education', 'kids'],
        509464: ['education', 'kids'],
        509468: ['education', 'kids'],
        509470: ['education', 'kids'],
        509472: ['education', 'kids'],
        509474: ['education', 'kids'],
        509476: ['education', 'kids'],
        509478: ['education', 'kids'],
        509484: ['education', 'kids'],
        509486: ['education', 'kids'],
        509488: ['education', 'kids'],
        509490: ['education', 'kids'],
        509492: ['education', 'kids'],
        509494: ['education', 'kids'],
        509496: ['education', 'kids'],
        509498: ['education', 'kids'],
        509500: ['education', 'kids'],
        509502: ['education', 'kids'],
        509504: ['education', 'kids'],
        509508: ['education', 'kids'],
        509512: ['education', 'kids'],
        509538: ['education', 'kids'],
        509540: ['education', 'kids'],
        511502: ['games', 'kids'],
        367693: ['utilities', 'science-tech'],
        424272: ['science-tech', 'news'],
        460891: ['science-tech', 'news'],
        468278: ['science-tech', 'education'],
        468406: ['science-tech', 'education'],
        469765: ['science-tech', 'productivity'],
        480750: ['science-tech', 'education'],
        502187: ['science-tech', 'education'],
        504637: ['science-tech', 'reference'],
        506187: ['science-tech', 'utilities'],
        508672: ['news', 'science-tech'],
        510050: ['science-tech', 'education'],
        511370: ['science-tech', 'reference'],
        511376: ['science-tech', 'games'],
        512174: ['education', 'science-tech'],
        512194: ['utilities', 'science-tech'],
        377564: ['lifestyle', 'personalization'],
        451302: ['entertainment', 'personalization'],
        452888: ['personalization', 'photo-video'],
        466637: ['personalization', 'photo-video'],
        477186: ['photo-video', 'personalization'],
        477304: ['photo-video', 'personalization'],
        477314: ['photo-video', 'personalization'],
        480489: ['photo-video', 'personalization'],
        480495: ['photo-video', 'personalization'],
        481512: ['photo-video', 'personalization'],
        482162: ['music', 'personalization'],
        488892: ['social', 'personalization'],
        500037: ['entertainment', 'personalization'],
        500041: ['entertainment', 'personalization'],
        506495: ['personalization', 'music'],
        506581: ['entertainment', 'personalization'],
    }

    # Adjust apps whose categories have changed.
    for chunk in chunked(ids, 100):
        for app in Webapp.objects.filter(pk__in=chunk):
            save = False
            for k, v in CATEGORY_REDIRECTS.items():
                if k in app.categories:
                    save = True
                    app.categories.remove(k)
                    app.categories.append(v)
            if save:
                task_log.info(u'[app:{0}] Adjusted categories: {1}'
                              .format(app, app.categories))
                app.save()
    # Add apps to new categories.
    for pk, categories in NEW_APP_CATEGORIES.items():
        try:
            app = Webapp.objects.get(pk=pk)
        except Webapp.DoesNotExist:
            continue
        app.categories = categories
        app.save()
        task_log.info(u'[app:{0}] Updated app categories: {1}'
                      .format(app, categories))
    def handle(self, *args, **options):
        """Queue _fix_developer_version_notes tasks for reviewer-comment
        communication notes, 100 note ids per task."""
        ids = (CommunicationNote.objects.filter(
            note_type=comm.REVIEWER_COMMENT).values_list('id', flat=True))

        for log_chunk in chunked(ids, 100):
            # Bug fix: each task must receive its own chunk. Previously
            # `ids` (the full queryset) was queued once per chunk, so every
            # task re-processed all notes.
            _fix_developer_version_notes.delay(log_chunk)
Ejemplo n.º 47
0
 def upload(self):
     """Build a FileUpload from self.data, posted as 3-character chunks."""
     pieces = [''.join(triple) for triple in chunked(self.data, 3)]
     return FileUpload.from_post(pieces, 'filename.zip', len(self.data))
Ejemplo n.º 48
0
 def upload(self):
     # Create a FileUpload whose payload arrives as 3-character pieces,
     # simulating a chunked POST body.
     # The data should be in chunks.
     data = [''.join(x) for x in chunked(self.data, 3)]
     return FileUpload.from_post(data, 'filename.zip', len(self.data))