Example #1
    def test_reset_base_because_over_reset_threshold(self, should_reset_mock):
        should_reset_mock.return_value = True
        set_config(MLBF_TIME_CONFIG_KEY, 123456, json_value=True)
        set_config(MLBF_BASE_ID_CONFIG_KEY, 987654, json_value=True)
        prev_blocked_path = os.path.join(settings.MLBF_STORAGE_PATH, '123456',
                                         'blocked.json')
        with storage.open(prev_blocked_path, 'w') as blocked_file:
            json.dump(['madeup@guid:12345'], blocked_file)
        base_blocked_path = os.path.join(settings.MLBF_STORAGE_PATH, '987654',
                                         'blocked.json')
        with storage.open(base_blocked_path, 'w') as blocked_file:
            json.dump([], blocked_file)

        upload_mlbf_to_kinto()

        generation_time = int(
            datetime.datetime(2020, 1, 1, 12, 34, 56).timestamp() * 1000)

        self.publish_attachment_mock.assert_called_with(
            {
                'key_format': MLBF.KEY_FORMAT,
                'generation_time': generation_time,
                'attachment_type': 'bloomfilter-base'
            }, ('filter.bin', mock.ANY, 'application/octet-stream'))
        self.publish_record_mock.assert_not_called()
        self.delete_mock.assert_called_once()
        assert (get_config(MLBF_TIME_CONFIG_KEY,
                           json_value=True) == generation_time)
        assert (get_config(MLBF_BASE_ID_CONFIG_KEY,
                           json_value=True) == generation_time)

        gen_path = os.path.join(settings.MLBF_STORAGE_PATH,
                                str(generation_time))
        # no stash because we're starting with a new base mlbf
        assert not os.path.exists(os.path.join(gen_path, 'stash.json'))
Example #2
    def test_force_base_option(self, should_reset_mock):
        should_reset_mock.return_value = False

        # set the times to now
        now = datetime.datetime.now()
        now_timestamp = now.timestamp() * 1000
        set_config(MLBF_TIME_CONFIG_KEY, now_timestamp, json_value=True)
        self.block.update(modified=now)
        prev_blocked_path = os.path.join(
            settings.MLBF_STORAGE_PATH, str(now_timestamp), 'blocked.json')
        with storage.open(prev_blocked_path, 'w') as blocked_file:
            json.dump([f'{self.block.guid}:1.2b3'], blocked_file)
        # without force_base nothing happens
        upload_mlbf_to_remote_settings()
        self.publish_attachment_mock.assert_not_called()
        self.publish_record_mock.assert_not_called()

        # but with force_base=True we generate a filter
        upload_mlbf_to_remote_settings(force_base=True)
        self.publish_attachment_mock.assert_called_once()  # the mlbf
        self.publish_record_mock.assert_not_called()  # no stash
        self.delete_mock.assert_called()  # the collection was cleared

        # double-check there is no stash
        gen_path = os.path.join(
            settings.MLBF_STORAGE_PATH,
            str(get_config(MLBF_TIME_CONFIG_KEY, json_value=True)))
        # no stash because we're starting with a new base mlbf
        assert not os.path.exists(os.path.join(gen_path, 'stash.json'))
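Taken together, these first two tests (and the stash tests further down this page) imply a three-way decision in the cron. The sketch below is only an assumption about that control flow, not code from addons-server; the function name and arguments are invented for illustration.

def decide_upload_sketch(force_base, needs_reset, blocks_changed):
    # New base: clear the remote collection, upload a fresh filter and reset
    # both MLBF config keys (generation time and base id).
    if force_base or needs_reset:
        return 'base'
    # Existing base plus changed blocks: publish a stash record instead.
    if blocks_changed:
        return 'stash'
    # Nothing changed since the last generation time: no upload at all.
    return None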
Example #3
def save_motd(request):
    form = MOTDForm(request.POST)
    if form.is_valid():
        set_config('reviewers_review_motd', form.cleaned_data['motd'])
        return redirect(reverse('reviewers.motd'))
    data = context(request, form=form)
    return render(request, 'reviewers/motd.html', data)
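For context: the value stored by save_motd() is read back with get_config() when the message-of-the-day page is rendered. The companion view below is a hedged sketch under that assumption; the view name is invented, and only helpers already visible in this example (context(), render(), MOTDForm) plus get_config() are used.

def motd_sketch(request):
    # Pre-populate the form with the currently stored message.
    form = MOTDForm(initial={'motd': get_config('reviewers_review_motd')})
    data = context(request, form=form)
    return render(request, 'reviewers/motd.html', data)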
Example #4
    def test_stash_because_previous_mlbf(self):
        set_config(MLBF_TIME_CONFIG_KEY, 123456, json_value=True)
        set_config(MLBF_BASE_ID_CONFIG_KEY, 123456, json_value=True)
        prev_blocked_path = os.path.join(settings.MLBF_STORAGE_PATH, '123456',
                                         'blocked.json')
        with storage.open(prev_blocked_path, 'w') as blocked_file:
            json.dump(['madeup@guid:123'], blocked_file)

        upload_mlbf_to_kinto()

        generation_time = int(
            datetime.datetime(2020, 1, 1, 12, 34, 56).timestamp() * 1000)

        self.publish_attachment_mock.assert_called_with(
            {
                'key_format': MLBF.KEY_FORMAT,
                'generation_time': generation_time,
                'attachment_type': 'bloomfilter-full'
            }, ('filter.bin', mock.ANY, 'application/octet-stream'))
        self.publish_record_mock.assert_called_with({
            'key_format': MLBF.KEY_FORMAT,
            'stash_time': generation_time,
            'stash': {
                'blocked': [
                    f'{self.block.guid}:'
                    f'{self.block.addon.current_version.version}'
                ],
                'unblocked': ['madeup@guid:123']
            }
        })
        self.delete_mock.assert_not_called()
        assert (get_config(MLBF_TIME_CONFIG_KEY,
                           json_value=True) == generation_time)
        assert (get_config(MLBF_BASE_ID_CONFIG_KEY, json_value=True) == 123456)
Example #5
def upload_filter_to_kinto(generation_time, is_base=True, upload_stash=False):
    server = KintoServer(KINTO_BUCKET,
                         KINTO_COLLECTION_MLBF,
                         kinto_sign_off_needed=False)
    mlbf = MLBF(generation_time)
    if is_base:
        # clear the collection for the base - we want to be the only filter
        server.delete_all_records()
    # Deal with possible stashes first
    if upload_stash:
        # If we have a stash, write that
        stash_data = {
            'key_format': MLBF.KEY_FORMAT,
            'stash_time': generation_time,
            'stash': mlbf.stash_json,
        }
        server.publish_record(stash_data)

    # Then the bloomfilter
    data = {
        'key_format': MLBF.KEY_FORMAT,
        'generation_time': generation_time,
        'attachment_type': (
            BLOCKLIST_RECORD_MLBF_BASE if is_base
            else BLOCKLIST_RECORD_MLBF_UPDATE),
    }
    with storage.open(mlbf.filter_path, 'rb') as filter_file:
        attachment = ('filter.bin', filter_file, 'application/octet-stream')
        server.publish_attachment(data, attachment)
    server.complete_session()
    set_config(MLBF_TIME_CONFIG_KEY, generation_time, json_value=True)
    if is_base:
        set_config(MLBF_BASE_ID_CONFIG_KEY, generation_time, json_value=True)
Example #6
    def test_site_status(self):
        data = super(TestUserProfileSerializer, self).test_basic()
        assert data['site_status'] == {
            'read_only': False,
            'notice': None,
        }

        set_config('site_notice', 'THIS is NOT Á TEST!')
        data = super(TestUserProfileSerializer, self).test_basic()
        assert data['site_status'] == {
            'read_only': False,
            'notice': 'THIS is NOT Á TEST!',
        }

        with override_settings(READ_ONLY=True):
            data = super(TestUserProfileSerializer, self).test_basic()
        assert data['site_status'] == {
            'read_only': True,
            'notice': 'THIS is NOT Á TEST!',
        }

        Config.objects.get(key='site_notice').delete()
        with override_settings(READ_ONLY=True):
            data = super(TestUserProfileSerializer, self).test_basic()
        assert data['site_status'] == {
            'read_only': True,
            'notice': None,
        }
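The dict asserted on in this test can be produced from two inputs: the READ_ONLY setting and the optional 'site_notice' Config row. The helper below is a minimal sketch of that mapping, not the project's actual serializer code; it assumes get_config() returns None for a missing key, which is what the final assertion relies on.

from django.conf import settings

def site_status_sketch():
    return {
        'read_only': settings.READ_ONLY,
        'notice': get_config('site_notice'),
    }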
Example #7
def save_motd(request):
    if not acl.action_allowed(request, 'AddonReviewerMOTD', 'Edit'):
        raise PermissionDenied
    form = forms.MOTDForm(request.POST)
    if form.is_valid():
        set_config('editors_review_motd', form.cleaned_data['motd'])
        return redirect(reverse('editors.motd'))
    data = context(request, form=form)
    return render(request, 'editors/motd.html', data)
Example #8
def save_motd(request):
    if not acl.action_allowed(
            request, amo.permissions.ADDON_REVIEWER_MOTD_EDIT):
        raise PermissionDenied
    form = forms.MOTDForm(request.POST)
    if form.is_valid():
        set_config('editors_review_motd', form.cleaned_data['motd'])
        return redirect(reverse('editors.motd'))
    data = context(request, form=form)
    return render(request, 'editors/motd.html', data)
Example #9
def save_motd(request):
    if not acl.action_allowed(request,
                              amo.permissions.ADDON_REVIEWER_MOTD_EDIT):
        raise PermissionDenied
    form = forms.MOTDForm(request.POST)
    if form.is_valid():
        set_config('reviewers_review_motd', form.cleaned_data['motd'])
        return redirect(reverse('reviewers.motd'))
    data = context(request, form=form)
    return render(request, 'reviewers/motd.html', data)
Example #10
    def test_no_block_changes(frozen_time, self):
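        # Note: the extra argument ahead of `self` is the frozen-time factory
        # injected by this test's (not shown) time-freezing decorator, e.g.
        # freezegun's freeze_time(..., as_arg=True); it is not a typo.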
        # This was the last time the mlbf was generated
        last_time = int(
            (frozen_time() - timedelta(seconds=1)).timestamp() * 1000)
        # And the Block was modified just before that, so it would be included
        self.block.update(modified=(frozen_time() - timedelta(seconds=2)))
        set_config(MLBF_TIME_CONFIG_KEY, last_time, json_value=True)
        set_config(MLBF_BASE_ID_CONFIG_KEY, last_time, json_value=True)
        prev_blocked_path = os.path.join(settings.MLBF_STORAGE_PATH,
                                         str(last_time), 'blocked.json')
        with storage.open(prev_blocked_path, 'w') as blocked_file:
            json.dump([f'{self.block.guid}:1.2b3'], blocked_file)

        upload_mlbf_to_remote_settings()
        # So no need for a new bloomfilter
        self.publish_attachment_mock.assert_not_called()
        self.publish_record_mock.assert_not_called()
        self.cleanup_files_mock.assert_not_called()

        # But if we add a new Block a new filter is needed
        Block.objects.create(
            addon=addon_factory(file_kw={
                'is_signed': True,
                'is_webextension': True
            }),
            updated_by=user_factory(),
        )
        upload_mlbf_to_remote_settings()
        self.publish_attachment_mock.assert_not_called()
        self.publish_record_mock.assert_called_once()
        self.cleanup_files_mock.assert_called_once()
        assert get_config(MLBF_TIME_CONFIG_KEY, json_value=True) == int(
            datetime.datetime(2020, 1, 1, 12, 34, 56).timestamp() * 1000)
        self.statsd_incr_mock.reset_mock()

        frozen_time.tick()
        # If the first block is deleted the last_modified date won't have
        # changed, but the number of blocks will, so a new upload is triggered.
        last_modified = get_blocklist_last_modified_time()
        self.block.delete()
        assert last_modified == get_blocklist_last_modified_time()
        upload_mlbf_to_remote_settings()
        self.publish_attachment_mock.assert_not_called()
        assert self.publish_record_mock.call_count == 2
        assert self.cleanup_files_mock.call_count == 2

        self.statsd_incr_mock.assert_has_calls([
            mock.call(f'{STATSD_PREFIX}blocked_changed', 1),
            mock.call(f'{STATSD_PREFIX}blocked_count', 1),
            mock.call(f'{STATSD_PREFIX}not_blocked_count', 4),
            mock.call('blocklist.tasks.upload_filter.upload_stash'),
            mock.call(f'{STATSD_PREFIX}success'),
        ])
        self.cleanup_files_mock.assert_called_with(base_filter_id=last_time)
Example #11
def upload_filter_to_kinto(generation_time):
    server = KintoServer(KINTO_BUCKET,
                         KINTO_COLLECTION_MLBF,
                         kinto_sign_off_needed=False)
    data = {
        'key_format': MLBF.KEY_FORMAT,
        'generation_time': generation_time,
    }
    mlbf_path = MLBF(generation_time).filter_path
    with storage.open(mlbf_path) as filter_file:
        attachment = ('filter.bin', filter_file, 'application/octet-stream')
        server.publish_attachment(data, attachment)
    server.complete_session()
    set_config(MLBF_TIME_CONFIG_KEY, generation_time, json_value=True)
Example #12
    def test_handle_no_min_approved_updates(self):
        # With only one of the 2 keys set, raise CommandError.
        Config.objects.get(key='AUTO_APPROVAL_MIN_APPROVED_UPDATES').delete()
        assert get_config('AUTO_APPROVAL_MIN_APPROVED_UPDATES') is None
        with self.assertRaises(CommandError):
            call_command('auto_approve')

        # With both keys set but min approved updates is 0, raise CommandError.
        set_config('AUTO_APPROVAL_MIN_APPROVED_UPDATES', 0)
        with self.assertRaises(CommandError):
            call_command('auto_approve')

        # With both keys set to non-zero, everything should work.
        set_config('AUTO_APPROVAL_MIN_APPROVED_UPDATES', 1)
        call_command('auto_approve')
Example #13
    def test_response(self):
        response = self.client.get(self.url)
        assert response.status_code == 200
        assert response.data == {
            'read_only': False,
            'notice': None,
        }

        set_config('site_notice', 'THIS is NOT Á TEST!')
        with override_settings(READ_ONLY=True):
            response = self.client.get(self.url)
        assert response.data == {
            'read_only': True,
            'notice': 'THIS is NOT Á TEST!',
        }
Example #14
    def test_has_read_developer_agreement(self):
        set_config('last_dev_agreement_change_date', '2018-01-01 00:00')
        after_change = (datetime(2018, 1, 1) + timedelta(days=1))
        before_change = (datetime(2018, 1, 1) - timedelta(days=42))

        assert not UserProfile().has_read_developer_agreement()
        assert not UserProfile(
            read_dev_agreement=None).has_read_developer_agreement()
        assert not UserProfile(
            read_dev_agreement=before_change).has_read_developer_agreement()

        # User has read the agreement after it was modified for
        # post-review: it should return True.
        assert UserProfile(
            read_dev_agreement=after_change).has_read_developer_agreement()
Example #15
    def test_stash_because_many_mlbf(self):
        set_config(MLBF_TIME_CONFIG_KEY, 123456, json_value=True)
        set_config(MLBF_BASE_ID_CONFIG_KEY, 987654, json_value=True)
        prev_blocked_path = os.path.join(settings.MLBF_STORAGE_PATH, '123456',
                                         'blocked.json')
        with storage.open(prev_blocked_path, 'w') as blocked_file:
            json.dump(['madeup@guid:12345'], blocked_file)
        base_blocked_path = os.path.join(settings.MLBF_STORAGE_PATH, '987654',
                                         'blocked.json')
        with storage.open(base_blocked_path, 'w') as blocked_file:
            json.dump([], blocked_file)

        upload_mlbf_to_remote_settings()

        generation_time = int(
            datetime.datetime(2020, 1, 1, 12, 34, 56).timestamp() * 1000)

        self.publish_attachment_mock.assert_called_with(
            {
                'key_format': MLBF.KEY_FORMAT,
                'generation_time': generation_time,
                'attachment_type': 'bloomfilter-full'
            }, ('filter.bin', mock.ANY, 'application/octet-stream'))
        self.publish_record_mock.assert_called_with({
            'key_format': MLBF.KEY_FORMAT,
            'stash_time': generation_time,
            'stash': {
                'blocked': [
                    f'{self.block.guid}:'
                    f'{self.block.addon.current_version.version}'
                ],
                'unblocked': ['madeup@guid:12345']
            }
        })
        self.delete_mock.assert_not_called()
        assert (get_config(MLBF_TIME_CONFIG_KEY,
                           json_value=True) == generation_time)
        assert (get_config(MLBF_BASE_ID_CONFIG_KEY, json_value=True) == 987654)

        self.statsd_incr_mock.assert_has_calls([
            mock.call(f'{STATSD_PREFIX}blocked_changed', 2),
            mock.call(f'{STATSD_PREFIX}blocked_count', 1),
            mock.call(f'{STATSD_PREFIX}not_blocked_count', 3),
            mock.call('blocklist.tasks.upload_filter.upload_stash'),
            mock.call('blocklist.tasks.upload_filter.upload_mlbf'),
            mock.call('blocklist.tasks.upload_filter.upload_mlbf.full'),
            mock.call(f'{STATSD_PREFIX}success'),
        ])
Example #16
    def test_handle_no_max_average_daily_users(self):
        # With only one of the 2 keys set, raise CommandError.
        Config.objects.get(
            key='AUTO_APPROVAL_MAX_AVERAGE_DAILY_USERS').delete()
        assert get_config('AUTO_APPROVAL_MAX_AVERAGE_DAILY_USERS') is None
        with self.assertRaises(CommandError):
            call_command('auto_approve')

        # With both keys set but daily users is 0, raise CommandError.
        set_config('AUTO_APPROVAL_MAX_AVERAGE_DAILY_USERS', 0)
        with self.assertRaises(CommandError):
            call_command('auto_approve')

        # With both keys set to non-zero, everything should work.
        set_config('AUTO_APPROVAL_MAX_AVERAGE_DAILY_USERS', 10000)
        call_command('auto_approve')
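Both auto_approve command tests on this page expect the same guard: each auto-approval config key must exist and be non-zero before the command runs. The check below is a hedged reconstruction of that behaviour, not the command's actual code; it reuses the project's get_config() helper.

from django.core.management.base import CommandError

def check_auto_approval_config_sketch():
    for key in ('AUTO_APPROVAL_MAX_AVERAGE_DAILY_USERS',
                'AUTO_APPROVAL_MIN_APPROVED_UPDATES'):
        value = get_config(key)
        # A missing or zero value aborts the command, matching assertRaises above.
        if not value or int(value) <= 0:
            raise CommandError(f'{key} must be set to a non-zero value.')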
Example #17
    def test_has_read_developer_agreement(self):
        set_config('last_dev_agreement_change_date', '2018-01-01 00:00')
        after_change = (
            datetime(2018, 1, 1) + timedelta(days=1))
        before_change = (
            datetime(2018, 1, 1) - timedelta(days=42))

        assert not UserProfile().has_read_developer_agreement()
        assert not UserProfile(
            read_dev_agreement=None).has_read_developer_agreement()
        assert not UserProfile(
            read_dev_agreement=before_change).has_read_developer_agreement()

        # User has read the agreement after it was modified for
        # post-review: it should return True.
        assert UserProfile(
            read_dev_agreement=after_change).has_read_developer_agreement()
Example #18
    def setUp(self):
        self.addon = addon_factory(average_daily_users=666)
        self.version = version_factory(
            addon=self.addon, file_kw={
                'status': amo.STATUS_AWAITING_REVIEW,
                'is_webextension': True})
        self.file = self.version.all_files[0]
        self.file_validation = FileValidation.objects.create(
            file=self.version.all_files[0], validation=u'{}')
        AddonApprovalsCounter.objects.create(addon=self.addon, counter=1)
        set_config('AUTO_APPROVAL_MAX_AVERAGE_DAILY_USERS', 10000)
        set_config('AUTO_APPROVAL_MIN_APPROVED_UPDATES', 1)

        # Always mock log_final_summary() method so we can look at the stats
        # easily.
        patcher = mock.patch.object(auto_approve.Command, 'log_final_summary')
        self.log_final_summary_mock = patcher.start()
        self.addCleanup(patcher.stop)
Example #19
    def test_has_read_developer_agreement(self):
        set_config('last_dev_agreement_change_date', '2019-06-12 00:00')
        after_change = datetime(2019, 6, 12) + timedelta(days=1)
        before_change = datetime(2019, 6, 12) - timedelta(days=42)

        assert not UserProfile.objects.create(
            username='******').has_read_developer_agreement()
        assert not UserProfile.objects.create(
            username='******',
            read_dev_agreement=None).has_read_developer_agreement()
        assert not UserProfile.objects.create(
            username='******',
            read_dev_agreement=before_change).has_read_developer_agreement()

        # User has read the agreement after it was modified for
        # post-review: it should return True.
        assert UserProfile.objects.create(
            username='******',
            read_dev_agreement=after_change).has_read_developer_agreement()
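The three negative cases and one positive case above boil down to: no read_dev_agreement date, or one older than the configured cutoff, means the agreement has not been (re-)read. The function below is a plausible sketch of that logic, assuming the config value is stored as a 'YYYY-MM-DD HH:MM' string; it is not the actual model code.

from datetime import datetime

def has_read_developer_agreement_sketch(user):
    cutoff_raw = get_config('last_dev_agreement_change_date')
    cutoff = (datetime.strptime(cutoff_raw, '%Y-%m-%d %H:%M')
              if cutoff_raw else None)
    if not user.read_dev_agreement:
        return False
    # An agreement read before the last change date no longer counts.
    return cutoff is None or user.read_dev_agreement > cutoff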
Example #20
    def test_stash_because_previous_mlbf(self):
        set_config(MLBF_TIME_CONFIG_KEY, 123456, json_value=True)
        set_config(MLBF_BASE_ID_CONFIG_KEY, 123456, json_value=True)
        prev_blocked_path = os.path.join(
            settings.MLBF_STORAGE_PATH, '123456', 'blocked.json'
        )
        with storage.open(prev_blocked_path, 'w') as blocked_file:
            json.dump(['madeup@guid:123'], blocked_file)

        upload_mlbf_to_remote_settings()

        generation_time = int(
            datetime.datetime(2020, 1, 1, 12, 34, 56).timestamp() * 1000
        )

        self.publish_attachment_mock.assert_not_called()
        self.publish_record_mock.assert_called_with(
            {
                'key_format': MLBF.KEY_FORMAT,
                'stash_time': generation_time,
                'stash': {
                    'blocked': [
                        f'{self.block.guid}:'
                        f'{self.block.addon.current_version.version}'
                    ],
                    'unblocked': ['madeup@guid:123'],
                },
            }
        )
        self.delete_mock.assert_not_called()
        assert get_config(MLBF_TIME_CONFIG_KEY, json_value=True) == generation_time
        assert get_config(MLBF_BASE_ID_CONFIG_KEY, json_value=True) == 123456

        self.statsd_incr_mock.assert_has_calls(
            [
                mock.call(f'{STATSD_PREFIX}blocked_changed', 2),
                mock.call(f'{STATSD_PREFIX}blocked_count', 1),
                mock.call(f'{STATSD_PREFIX}not_blocked_count', 3),
                mock.call('blocklist.tasks.upload_filter.upload_stash'),
                mock.call(f'{STATSD_PREFIX}success'),
            ]
        )
        self.cleanup_files_mock.assert_called_with(base_filter_id=123456)
Example #21
    def test_no_need_for_new_mlbf(self, publish_mock):
        # This was the last time the mlbf was generated
        last_time = int(
            datetime.datetime(2020, 1, 1, 12, 34, 1).timestamp() * 1000)
        # And the Block was modified just before, so it would be included
        self.block.update(modified=datetime.datetime(2020, 1, 1, 12, 34, 0))
        set_config(MLBF_TIME_CONFIG_KEY, last_time, json_value=True)
        upload_mlbf_to_kinto()
        # So no need for a new bloomfilter
        publish_mock.assert_not_called()

        # But if we add a new Block a new filter is needed
        addon_factory()
        Block.objects.create(
            addon=addon_factory(),
            updated_by=user_factory())
        upload_mlbf_to_kinto()
        publish_mock.assert_called_once()
        assert (
            get_config(MLBF_TIME_CONFIG_KEY, json_value=True) ==
            int(datetime.datetime(2020, 1, 1, 12, 34, 56).timestamp() * 1000))
Example #22
def upload_filter(generation_time, is_base=True, upload_stash=False):
    bucket = settings.REMOTE_SETTINGS_WRITER_BUCKET
    server = RemoteSettings(
        bucket, REMOTE_SETTINGS_COLLECTION_MLBF, sign_off_needed=False)
    mlbf = MLBF(generation_time)
    if is_base:
        # clear the collection for the base - we want to be the only filter
        server.delete_all_records()
        statsd.incr('blocklist.tasks.upload_filter.reset_collection')
    # Deal with possible stashes first
    if upload_stash:
        # If we have a stash, write that
        stash_data = {
            'key_format': MLBF.KEY_FORMAT,
            'stash_time': generation_time,
            'stash': mlbf.stash_json,
        }
        server.publish_record(stash_data)
        statsd.incr('blocklist.tasks.upload_filter.upload_stash')

    # Then the bloomfilter
    data = {
        'key_format': MLBF.KEY_FORMAT,
        'generation_time': generation_time,
        'attachment_type':
            BLOCKLIST_RECORD_MLBF_BASE if is_base else
            BLOCKLIST_RECORD_MLBF_UPDATE,
    }
    with storage.open(mlbf.filter_path, 'rb') as filter_file:
        attachment = ('filter.bin', filter_file, 'application/octet-stream')
        server.publish_attachment(data, attachment)
        statsd.incr('blocklist.tasks.upload_filter.upload_mlbf')
    statsd.incr(
        'blocklist.tasks.upload_filter.upload_mlbf.'
        f'{"base" if is_base else "full"}')
    server.complete_session()
    set_config(MLBF_TIME_CONFIG_KEY, generation_time, json_value=True)
    if is_base:
        set_config(MLBF_BASE_ID_CONFIG_KEY, generation_time, json_value=True)
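The flag combinations exercised by the tests on this page map onto this task roughly as follows. This is a hedged usage sketch only; how the cron actually invokes the task (directly or via a task queue) is not shown here.

import time

generation_time = int(time.time() * 1000)  # epoch milliseconds, as elsewhere on this page

# Fresh base filter: the collection is cleared, the filter is attached, and
# both MLBF_TIME_CONFIG_KEY and MLBF_BASE_ID_CONFIG_KEY are updated.
upload_filter(generation_time, is_base=True, upload_stash=False)

# Update on top of an existing base: a stash record is published first, then
# the filter attachment; only MLBF_TIME_CONFIG_KEY is updated.
upload_filter(generation_time, is_base=False, upload_stash=True)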
Example #23
    def test_stash_file(self, publish_mock):
        set_config(MLBF_TIME_CONFIG_KEY, 123456, json_value=True)
        prev_blocked_path = os.path.join(settings.MLBF_STORAGE_PATH, '123456',
                                         'blocked.json')
        with storage.open(prev_blocked_path, 'w') as blocked_file:
            json.dump(['madeup@guid:123'], blocked_file)

        upload_mlbf_to_kinto()

        generation_time = int(
            datetime.datetime(2020, 1, 1, 12, 34, 56).timestamp() * 1000)

        stash_path = os.path.join(settings.MLBF_STORAGE_PATH,
                                  str(generation_time), 'stash.json')
        assert os.path.exists(stash_path)
        assert os.path.getsize(stash_path)
        with open(stash_path) as stash_file:
            blocked_guid = (f'{self.block.guid}:'
                            f'{self.block.addon.current_version.version}')
            assert json.load(stash_file) == {
                'blocked': [blocked_guid],
                'unblocked': ['madeup@guid:123']
            }
Example #24
    def test_no_block_changes(self):
        # This was the last time the mlbf was generated
        last_time = int(
            datetime.datetime(2020, 1, 1, 12, 34, 1).timestamp() * 1000)
        # And the Block was modified just before, so it would be included
        self.block.update(modified=datetime.datetime(2020, 1, 1, 12, 34, 0))
        set_config(MLBF_TIME_CONFIG_KEY, last_time, json_value=True)
        upload_mlbf_to_kinto()
        # So no need for a new bloomfilter
        self.publish_attachment_mock.assert_not_called()
        self.publish_record_mock.assert_not_called()

        # But if we add a new Block a new filter is needed
        addon_factory()
        Block.objects.create(
            addon=addon_factory(file_kw={
                'is_signed': True,
                'is_webextension': True,
            }),
            updated_by=user_factory())
        upload_mlbf_to_kinto()
        self.publish_attachment_mock.assert_called_once()
        assert (get_config(MLBF_TIME_CONFIG_KEY, json_value=True) == int(
            datetime.datetime(2020, 1, 1, 12, 34, 56).timestamp() * 1000))
Example #25
def upload_mlbf_to_kinto():
    if not waffle.switch_is_active('blocklist_mlbf_submit'):
        log.info('Upload MLBF to kinto cron job disabled.')
        return
    last_generation_time = get_config(MLBF_TIME_CONFIG_KEY, 0, json_value=True)
    if last_generation_time > _get_blocklist_last_modified_time():
        log.info(
            'No new/modified Blocks in database; skipping MLBF generation')
        return

    log.info('Starting Upload MLBF to kinto cron job.')
    server = KintoServer(KINTO_BUCKET,
                         KINTO_COLLECTION_MLBF,
                         kinto_sign_off_needed=False)
    stats = {}
    key_format = get_mlbf_key_format()
    # This timestamp represents the point in time when all previous addon
    # guid + versions and blocks were used to generate the bloomfilter.
    # An add-on version/file from before this time will definitely be accounted
    # for in the bloomfilter so we can reliably assert if it's blocked or not.
    # An add-on version/file from after this time can't be reliably asserted -
    # there may be false positives or false negatives.
    # https://github.com/mozilla/addons-server/issues/13695
    generation_time = int(time.time() * 1000)
    bloomfilter = generate_mlbf(stats, key_format)
    with tempfile.NamedTemporaryFile() as filter_file:
        bloomfilter.tofile(filter_file)
        filter_file.seek(0)
        data = {
            'key_format': key_format,
            'generation_time': generation_time,
        }
        attachment = ('filter.bin', filter_file, 'application/octet-stream')
        server.publish_attachment(data, attachment)
    server.complete_session()
    set_config(MLBF_TIME_CONFIG_KEY, generation_time, json_value=True)
    log.info(json.dumps(stats))
Example #26
def test_set_config():
    assert Config.objects.filter(key='foo').count() == 0
    set_config('foo', 'bar')
    assert Config.objects.get(key='foo').value == 'bar'

    # Overwrites existing values
    set_config('key', 'value 1')
    set_config('key', 'value 2')

    assert Config.objects.get(key='key').value == 'value 2'
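For readers unfamiliar with these helpers: the behaviour this test relies on is "create the Config row if missing, overwrite its value otherwise". The pair below is a minimal sketch of helpers with that behaviour, assuming a simple key/value Config model; the project's real set_config/get_config also accept json_value and default arguments, as seen in the MLBF examples above.

import json

def set_config_sketch(key, value, json_value=False):
    # Upsert semantics: a later write to the same key overwrites the earlier one.
    if json_value:
        value = json.dumps(value)
    Config.objects.update_or_create(key=key, defaults={'value': value})

def get_config_sketch(key, default=None, json_value=False):
    try:
        value = Config.objects.get(key=key).value
    except Config.DoesNotExist:
        return default
    return json.loads(value) if json_value else value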