def test_stash_because_previous_mlbf(self):
    """A stash record is published when a previous base mlbf exists."""
    # Pretend a filter was already generated and uploaded at 123456.
    set_config(MLBF_TIME_CONFIG_KEY, 123456, json_value=True)
    set_config(MLBF_BASE_ID_CONFIG_KEY, 123456, json_value=True)
    previous_blocked = os.path.join(
        settings.MLBF_STORAGE_PATH, '123456', 'blocked.json')
    with storage.open(previous_blocked, 'w') as blocked_file:
        json.dump(['madeup@guid:123'], blocked_file)

    upload_mlbf_to_kinto()

    # Time is frozen; generation_time is that moment in milliseconds.
    generation_time = int(
        datetime.datetime(2020, 1, 1, 12, 34, 56).timestamp() * 1000)
    blocked_guid = (
        f'{self.block.guid}:{self.block.addon.current_version.version}')
    # The full filter is attached...
    self.publish_attachment_mock.assert_called_with(
        {'key_format': MLBF.KEY_FORMAT,
         'generation_time': generation_time,
         'attachment_type': 'bloomfilter-full'},
        ('filter.bin', mock.ANY, 'application/octet-stream'))
    # ...and a stash records the diff against the previous mlbf.
    self.publish_record_mock.assert_called_with({
        'key_format': MLBF.KEY_FORMAT,
        'stash_time': generation_time,
        'stash': {
            'blocked': [blocked_guid],
            'unblocked': ['madeup@guid:123'],
        },
    })
    self.delete_mock.assert_not_called()
    assert get_config(
        MLBF_TIME_CONFIG_KEY, json_value=True) == generation_time
    # The base id is unchanged: no new base filter was uploaded.
    assert get_config(MLBF_BASE_ID_CONFIG_KEY, json_value=True) == 123456
def test_reset_base_because_over_reset_threshold(self, should_reset_mock):
    """A fresh base filter (not a stash) is uploaded when the reset
    threshold is exceeded."""
    should_reset_mock.return_value = True
    # A previous filter and a base filter exist already.
    set_config(MLBF_TIME_CONFIG_KEY, 123456, json_value=True)
    set_config(MLBF_BASE_ID_CONFIG_KEY, 987654, json_value=True)
    for generation_id, contents in (
            ('123456', ['madeup@guid:12345']), ('987654', [])):
        blocked_path = os.path.join(
            settings.MLBF_STORAGE_PATH, generation_id, 'blocked.json')
        with storage.open(blocked_path, 'w') as blocked_file:
            json.dump(contents, blocked_file)

    upload_mlbf_to_kinto()

    # Time is frozen; generation_time is that moment in milliseconds.
    generation_time = int(
        datetime.datetime(2020, 1, 1, 12, 34, 56).timestamp() * 1000)
    # A brand new base filter is attached and old records are deleted.
    self.publish_attachment_mock.assert_called_with(
        {'key_format': MLBF.KEY_FORMAT,
         'generation_time': generation_time,
         'attachment_type': 'bloomfilter-base'},
        ('filter.bin', mock.ANY, 'application/octet-stream'))
    self.publish_record_mock.assert_not_called()
    self.delete_mock.assert_called_once()
    # Both config keys now point at the new generation.
    assert get_config(
        MLBF_TIME_CONFIG_KEY, json_value=True) == generation_time
    assert get_config(
        MLBF_BASE_ID_CONFIG_KEY, json_value=True) == generation_time
    # no stash because we're starting with a new base mlbf
    gen_path = os.path.join(
        settings.MLBF_STORAGE_PATH, str(generation_time))
    assert not os.path.exists(os.path.join(gen_path, 'stash.json'))
def test_no_previous_mlbf(self):
    """With no previous filter, a base filter is uploaded and no stash
    is produced."""
    upload_mlbf_to_kinto()

    # Time is frozen; generation_time is that moment in milliseconds.
    generation_time = int(
        datetime.datetime(2020, 1, 1, 12, 34, 56).timestamp() * 1000)
    self.publish_attachment_mock.assert_called_with(
        {'key_format': MLBF.KEY_FORMAT,
         'generation_time': generation_time,
         'attachment_type': 'bloomfilter-base'},
        ('filter.bin', mock.ANY, 'application/octet-stream'))
    self.publish_record_mock.assert_not_called()
    self.delete_mock.assert_called_once()
    assert get_config(
        MLBF_TIME_CONFIG_KEY, json_value=True) == generation_time
    assert get_config(
        MLBF_BASE_ID_CONFIG_KEY, json_value=True) == generation_time
    # The filter and both guid lists were written out and are non-empty...
    gen_path = os.path.join(
        settings.MLBF_STORAGE_PATH, str(generation_time))
    for filename in ('filter', 'blocked.json', 'notblocked.json'):
        assert os.path.getsize(os.path.join(gen_path, filename))
    # ...but there is no stash because there was no previous mlbf.
    assert not os.path.exists(os.path.join(gen_path, 'stash.json'))
def test_upload_mlbf_to_kinto(self, publish_mock):
    """The filter is published to kinto and persisted to storage."""
    upload_mlbf_to_kinto()

    # Time is frozen; generation_time is that moment in milliseconds.
    generation_time = int(
        datetime.datetime(2020, 1, 1, 12, 34, 56).timestamp() * 1000)
    publish_mock.assert_called_with(
        {'key_format': MLBF.KEY_FORMAT,
         'generation_time': generation_time},
        ('filter.bin', mock.ANY, 'application/octet-stream'))
    assert get_config(
        MLBF_TIME_CONFIG_KEY, json_value=True) == generation_time
    # The filter plus both guid lists exist on disk and are non-empty.
    out_dir = os.path.join(
        settings.MLBF_STORAGE_PATH, str(generation_time))
    for filename in ('filter', 'blocked.json', 'notblocked.json'):
        out_path = os.path.join(out_dir, filename)
        assert os.path.exists(out_path)
        assert os.path.getsize(out_path)
def test_waffle_off_disables_publishing(publish_mock):
    """Nothing is published while the waffle switch is off, even when
    there are blocks to publish."""
    addon_factory()
    Block.objects.create(addon=addon_factory(), updated_by=user_factory())

    upload_mlbf_to_kinto()

    publish_mock.assert_not_called()
def test_waffle_off_disables_publishing(self):
    """The waffle switch gates publishing, unless explicitly bypassed."""
    upload_mlbf_to_kinto()
    self.publish_attachment_mock.assert_not_called()
    self.publish_record_mock.assert_not_called()
    assert not get_config(MLBF_TIME_CONFIG_KEY)

    # Passing bypass_switch=True publishes regardless of the switch.
    upload_mlbf_to_kinto(bypass_switch=True)
    self.publish_attachment_mock.assert_called()
    assert get_config(MLBF_TIME_CONFIG_KEY)
def test_upload_mlbf_to_kinto(publish_mock, get_mlbf_key_format_mock):
    """The filter is published with the current key format."""
    expected_key_format = get_mlbf_key_format()
    get_mlbf_key_format_mock.return_value = expected_key_format
    addon_factory()
    Block.objects.create(addon=addon_factory(), updated_by=user_factory())

    upload_mlbf_to_kinto()

    publish_mock.assert_called_with(
        {'key_format': expected_key_format},
        ('filter.bin', mock.ANY, 'application/octet-stream'))
def test_upload_mlbf_to_kinto(self, publish_mock, get_mlbf_key_mock):
    """The filter is published with the key format and generation time."""
    expected_key_format = get_mlbf_key_format()
    get_mlbf_key_mock.return_value = expected_key_format

    upload_mlbf_to_kinto()

    # Time is frozen at 2020-01-01 12:34:56; the record carries that
    # moment in milliseconds (as a float, matching the original call).
    ms_timestamp = (
        datetime.datetime(2020, 1, 1, 12, 34, 56).timestamp() * 1000)
    publish_mock.assert_called_with(
        {'key_format': expected_key_format,
         'generation_time': ms_timestamp},
        ('filter.bin', mock.ANY, 'application/octet-stream'))
    # The config stores the integer form of the same timestamp.
    assert get_config(
        MLBF_TIME_CONFIG_KEY, json_value=True) == int(ms_timestamp)
def test_no_need_for_new_mlbf(self, publish_mock):
    """No new filter is uploaded when nothing changed since the last
    generation."""
    # This was the last time the mlbf was generated...
    last_time = int(
        datetime.datetime(2020, 1, 1, 12, 34, 1).timestamp() * 1000)
    set_config(MLBF_TIME_CONFIG_KEY, last_time, json_value=True)
    # ...and the Block was modified just before, so it's already included.
    self.block.update(modified=datetime.datetime(2020, 1, 1, 12, 34, 0))

    upload_mlbf_to_kinto()
    # So no need for a new bloomfilter.
    publish_mock.assert_not_called()

    # A newly created Block makes a new filter necessary.
    addon_factory()
    Block.objects.create(addon=addon_factory(), updated_by=user_factory())
    upload_mlbf_to_kinto()
    publish_mock.assert_called_once()
    assert get_config(MLBF_TIME_CONFIG_KEY, json_value=True) == int(
        datetime.datetime(2020, 1, 1, 12, 34, 56).timestamp() * 1000)
def test_no_block_changes(self):
    """No new filter is generated while the blocklist is unchanged."""
    # This was the last time the mlbf was generated...
    last_time = int(
        datetime.datetime(2020, 1, 1, 12, 34, 1).timestamp() * 1000)
    set_config(MLBF_TIME_CONFIG_KEY, last_time, json_value=True)
    # ...and the Block was modified just before, so it's already included.
    self.block.update(modified=datetime.datetime(2020, 1, 1, 12, 34, 0))

    upload_mlbf_to_kinto()
    # So no need for a new bloomfilter.
    self.publish_attachment_mock.assert_not_called()
    self.publish_record_mock.assert_not_called()

    # A new (signed webextension) Block forces a fresh filter.
    addon_factory()
    new_addon = addon_factory(
        file_kw={'is_signed': True, 'is_webextension': True})
    Block.objects.create(addon=new_addon, updated_by=user_factory())
    upload_mlbf_to_kinto()
    self.publish_attachment_mock.assert_called_once()
    assert get_config(MLBF_TIME_CONFIG_KEY, json_value=True) == int(
        datetime.datetime(2020, 1, 1, 12, 34, 56).timestamp() * 1000)
def test_stash_file(self, publish_mock):
    """A stash.json diff is written alongside the new filter."""
    # A previous generation exists at 123456 with one made-up guid.
    set_config(MLBF_TIME_CONFIG_KEY, 123456, json_value=True)
    previous_blocked = os.path.join(
        settings.MLBF_STORAGE_PATH, '123456', 'blocked.json')
    with storage.open(previous_blocked, 'w') as blocked_file:
        json.dump(['madeup@guid:123'], blocked_file)

    upload_mlbf_to_kinto()

    # Time is frozen; generation_time is that moment in milliseconds.
    generation_time = int(
        datetime.datetime(2020, 1, 1, 12, 34, 56).timestamp() * 1000)
    stash_path = os.path.join(
        settings.MLBF_STORAGE_PATH, str(generation_time), 'stash.json')
    assert os.path.exists(stash_path)
    assert os.path.getsize(stash_path)
    # The stash holds the newly blocked guid and the no-longer-blocked one.
    blocked_guid = (
        f'{self.block.guid}:{self.block.addon.current_version.version}')
    with open(stash_path) as stash_file:
        assert json.load(stash_file) == {
            'blocked': [blocked_guid],
            'unblocked': ['madeup@guid:123'],
        }
def test_no_block_changes(frozen_time, self):
    """No new filter is generated while the blocklist is unchanged."""
    # NOTE(review): `frozen_time` precedes `self` — this looks like
    # freezegun's as_arg behaviour, which injects the time factory as the
    # very first positional argument; confirm against the decorator used.
    # The mlbf was last generated one second ago...
    last_time = int(
        (frozen_time() - timedelta(seconds=1)).timestamp() * 1000)
    # ...and the Block was modified just before that, so it's included.
    self.block.update(modified=(frozen_time() - timedelta(seconds=2)))
    set_config(MLBF_TIME_CONFIG_KEY, last_time, json_value=True)
    previous_blocked = os.path.join(
        settings.MLBF_STORAGE_PATH, str(last_time), 'blocked.json')
    with storage.open(previous_blocked, 'w') as blocked_file:
        json.dump([f'{self.block.guid}:1.2b3'], blocked_file)

    upload_mlbf_to_kinto()
    # So no need for a new bloomfilter.
    self.publish_attachment_mock.assert_not_called()
    self.publish_record_mock.assert_not_called()

    # A new (signed webextension) Block forces a fresh filter.
    addon_factory()
    new_addon = addon_factory(
        file_kw={'is_signed': True, 'is_webextension': True})
    Block.objects.create(addon=new_addon, updated_by=user_factory())
    upload_mlbf_to_kinto()
    self.publish_attachment_mock.assert_called_once()
    assert get_config(MLBF_TIME_CONFIG_KEY, json_value=True) == int(
        datetime.datetime(2020, 1, 1, 12, 34, 56).timestamp() * 1000)

    frozen_time.tick()
    # If the first block is deleted the last_modified date won't have
    # changed, but the number of blocks will, so trigger a new filter.
    last_modified = get_blocklist_last_modified_time()
    self.block.delete()
    assert last_modified == get_blocklist_last_modified_time()
    upload_mlbf_to_kinto()
    assert self.publish_attachment_mock.call_count == 2  # called again
def test_waffle_off_disables_publishing(self):
    """Nothing is published while the waffle switch is off."""
    upload_mlbf_to_kinto()

    self.publish_attachment_mock.assert_not_called()
    self.publish_record_mock.assert_not_called()
    assert not get_config(MLBF_TIME_CONFIG_KEY)
def test_waffle_off_disables_publishing(self, publish_mock):
    """Nothing is published while the waffle switch is off."""
    upload_mlbf_to_kinto()

    publish_mock.assert_not_called()
    assert not get_config(MLBF_TIME_CONFIG_KEY)