def test_publish_record(self):
    """publish_record() POSTs a new record and PUTs when given an existing id.

    The returned dict is the server's ``data`` payload, and the session is
    flagged as changed after the first publish.
    """
    remote = RemoteSettings('foo', 'baa')
    remote._setup_done = True
    assert not remote._changes

    create_url = (
        settings.REMOTE_SETTINGS_WRITER_URL +
        'buckets/foo/collections/baa/records')
    responses.add(
        responses.POST,
        create_url,
        content_type='application/json',
        json={'data': {'id': 'new!'}},
    )
    created = remote.publish_record({'something': 'somevalue'})
    assert remote._changes
    assert created == {'id': 'new!'}

    update_url = (
        settings.REMOTE_SETTINGS_WRITER_URL +
        'buckets/foo/collections/baa/records/an-id')
    responses.add(
        responses.PUT,
        update_url,
        content_type='application/json',
        json={'data': {'id': 'updated'}},
    )
    updated = remote.publish_record({'something': 'somevalue'}, 'an-id')
    assert updated == {'id': 'updated'}
def test_setup_server_bucket(self):
    """A 403 on the bucket and a 404 on the collection mean neither exists,
    so setup_test_server_collection() PUTs both into existence."""
    remote = RemoteSettings('foo', 'baa')
    bucket_url = settings.REMOTE_SETTINGS_WRITER_URL + 'buckets/foo'
    collection_url = (
        settings.REMOTE_SETTINGS_WRITER_URL + 'buckets/foo/collections/baa')

    # if the server 403s on the bucket it's because it doesn't exist
    responses.add(
        responses.GET, bucket_url, content_type='application/json',
        status=403)
    responses.add(
        responses.PUT, bucket_url, content_type='application/json')
    # if the server 404s on the collection it's because it doesn't exist
    responses.add(
        responses.GET, collection_url, content_type='application/json',
        status=404)
    responses.add(
        responses.PUT, collection_url, content_type='application/json',
        status=201)

    remote.setup_test_server_collection()
def upload_filter(generation_time, is_base=True, upload_stash=False):
    """Publish an MLBF bloomfilter (and optionally a stash) to Remote Settings.

    A base upload first clears the collection so this filter is the only one.
    After publishing, the session is completed and the generation time is
    written to config (plus the base id when this is a base filter).
    """
    bucket = settings.REMOTE_SETTINGS_WRITER_BUCKET
    server = RemoteSettings(
        bucket, REMOTE_SETTINGS_COLLECTION_MLBF, sign_off_needed=False)
    mlbf = MLBF(generation_time)

    if is_base:
        # clear the collection for the base - we want to be the only filter
        server.delete_all_records()

    # Deal with possible stashes first
    if upload_stash:
        # If we have a stash, write that
        server.publish_record({
            'key_format': MLBF.KEY_FORMAT,
            'stash_time': generation_time,
            'stash': mlbf.stash_json,
        })

    # Then the bloomfilter
    attachment_type = (
        BLOCKLIST_RECORD_MLBF_BASE if is_base else BLOCKLIST_RECORD_MLBF_UPDATE)
    data = {
        'key_format': MLBF.KEY_FORMAT,
        'generation_time': generation_time,
        'attachment_type': attachment_type,
    }
    with storage.open(mlbf.filter_path, 'rb') as filter_file:
        attachment = ('filter.bin', filter_file, 'application/octet-stream')
        server.publish_attachment(data, attachment)

    server.complete_session()
    set_config(MLBF_TIME_CONFIG_KEY, generation_time, json_value=True)
    if is_base:
        set_config(MLBF_BASE_ID_CONFIG_KEY, generation_time, json_value=True)
def test_delete_all_records(self):
    """delete_all_records() DELETEs the records endpoint and marks the
    session as changed."""
    remote = RemoteSettings('foo', 'baa')
    remote._setup_done = True
    assert not remote._changes

    records_url = (
        settings.REMOTE_SETTINGS_WRITER_URL +
        'buckets/foo/collections/baa/records')
    responses.add(
        responses.DELETE, records_url, content_type='application/json')

    remote.delete_all_records()
    assert remote._changes
def test_setup_server_collection(self):
    """When the bucket already exists (GET 200) only the missing collection
    (GET 404) gets created with a PUT."""
    remote = RemoteSettings('foo', 'baa')
    bucket_url = settings.REMOTE_SETTINGS_WRITER_URL + 'buckets/foo'
    collection_url = (
        settings.REMOTE_SETTINGS_WRITER_URL + 'buckets/foo/collections/baa')

    # But if the bucket exists then the collection should still be created
    responses.add(responses.GET, bucket_url, content_type='application/json')
    responses.add(
        responses.GET, collection_url, content_type='application/json',
        status=404)
    responses.add(
        responses.PUT, collection_url, content_type='application/json',
        status=201)

    remote.setup_test_server_collection()
def legacy_publish_blocks(blocks):
    """Create or update the legacy Remote Settings record for each block.

    Blocks imported from a legacy regex guid are skipped (they can't be
    updated safely). New blocks get their ``legacy_id`` backfilled from the
    created record; existing ones are updated in place. The session is
    completed once at the end.
    """
    server = RemoteSettings(
        settings.REMOTE_SETTINGS_WRITER_BUCKET,
        REMOTE_SETTINGS_COLLECTION_LEGACY)
    for block in blocks:
        if block.is_imported_from_legacy_regex:
            log.info(f'Block [{block.guid}] was imported from a regex guid so '
                     "can't be safely updated. Skipping.")
            continue
        data = {
            'guid': block.guid,
            'details': {
                'bug': block.url,
                'why': block.reason,
                'name': str(block.reason).partition('.')[0],  # required
            },
            'enabled': True,
            'versionRange': [{
                'severity': 3,  # Always high severity now.
                'minVersion': block.min_version,
                'maxVersion': block.max_version,
            }],
        }
        if block.legacy_id:
            server.publish_record(data, block.legacy_id)
        else:
            record = server.publish_record(data)
            block.update(legacy_id=record.get('id', ''))
    server.complete_session()
def test_setup_server_auth(self):
    """An anonymous session (empty user id) triggers account creation; a
    repeat call that is already authenticated makes no PUT."""
    remote = RemoteSettings('foo', 'baa')
    root_url = settings.REMOTE_SETTINGS_WRITER_URL

    responses.add(
        responses.GET,
        root_url,
        content_type='application/json',
        json={'user': {'id': ''}},
    )
    responses.add(
        responses.PUT,
        root_url + 'accounts/test_username',
        content_type='application/json',
        json={'data': {'password': '******'}},
        status=201,
    )
    remote.setup_test_server_auth()

    # If repeated then the account should exist the 2nd time
    responses.add(
        responses.GET,
        root_url,
        content_type='application/json',
        json={'user': {'id': 'account:test_username'}},
    )
    remote.setup_test_server_auth()
def test_setup(self):
    """setup() resolves the per-user bucket from the auth response, verifies
    bucket and collection, and is a no-op on the second call."""
    remote = RemoteSettings('foo', 'baa')
    responses.add(
        responses.GET,
        settings.REMOTE_SETTINGS_WRITER_URL,
        content_type='application/json',
        json={'user': {'id': 'account:test_username'}},
    )
    bucket_url = (
        settings.REMOTE_SETTINGS_WRITER_URL + 'buckets/foo_test_username')
    responses.add(responses.GET, bucket_url, content_type='application/json')
    responses.add(
        responses.GET,
        bucket_url + '/collections/baa',
        content_type='application/json',
    )

    remote.setup()
    assert remote._setup_done
    assert remote.bucket == 'foo_test_username'

    remote.setup()  # a second time shouldn't make any requests
def test_complete_session_no_signoff(self):
    """With sign_off_needed=False, complete_session() is a no-op until there
    are changes, then PATCHes the collection status straight to 'to-sign'."""
    remote = RemoteSettings('foo', 'baa', sign_off_needed=False)
    remote._setup_done = True

    # should return because nothing to signoff
    remote.complete_session()

    remote._changes = True
    collection_url = (
        settings.REMOTE_SETTINGS_WRITER_URL + 'buckets/foo/collections/baa')
    responses.add(
        responses.PATCH, collection_url, content_type='application/json')
    remote.complete_session()
    assert not remote._changes

    expected_body = json.dumps({'data': {'status': 'to-sign'}}).encode()
    assert responses.calls[0].request.body == expected_body
def upload_filter(generation_time, is_base=True):
    """Publish a blocklist MLBF to Remote Settings.

    For a base upload the collection is wiped first and the full bloomfilter
    is published as an attachment; for an incremental upload only a stash
    record is published. statsd counters track each path. Afterwards the
    session is completed and the generation time is stored in config (plus
    the base id for base uploads).
    """
    server = RemoteSettings(
        settings.REMOTE_SETTINGS_WRITER_BUCKET,
        REMOTE_SETTINGS_COLLECTION_MLBF,
        sign_off_needed=False)
    mlbf = MLBF.load_from_storage(generation_time)

    if is_base:
        # clear the collection for the base - we want to be the only filter
        server.delete_all_records()
        statsd.incr('blocklist.tasks.upload_filter.reset_collection')
        # Then the bloomfilter
        data = {
            'key_format': MLBF.KEY_FORMAT,
            'generation_time': generation_time,
            'attachment_type': BLOCKLIST_RECORD_MLBF_BASE,
        }
        storage = SafeStorage(user_media='mlbf_storage')
        with storage.open(mlbf.filter_path, 'rb') as filter_file:
            attachment = (
                'filter.bin', filter_file, 'application/octet-stream')
            server.publish_attachment(data, attachment)
        statsd.incr('blocklist.tasks.upload_filter.upload_mlbf')
        statsd.incr('blocklist.tasks.upload_filter.upload_mlbf.base')
    else:
        # If we have a stash, write that
        server.publish_record({
            'key_format': MLBF.KEY_FORMAT,
            'stash_time': generation_time,
            'stash': mlbf.stash_json,
        })
        statsd.incr('blocklist.tasks.upload_filter.upload_stash')

    server.complete_session()
    set_config(MLBF_TIME_CONFIG_KEY, generation_time, json_value=True)
    if is_base:
        set_config(MLBF_BASE_ID_CONFIG_KEY, generation_time, json_value=True)
def test_publish_attachment(self, uuidmock):
    """publish_attachment() POSTs to the record's /attachment endpoint — a
    uuid-generated record id for new records, the supplied id for updates."""
    uuidmock.uuid4.return_value = 1234567890
    remote = RemoteSettings('foo', 'baa')
    remote._setup_done = True
    assert not remote._changes

    create_url = (
        settings.REMOTE_SETTINGS_WRITER_URL +
        'buckets/foo/collections/baa/records/1234567890/attachment')
    responses.add(
        responses.POST, create_url, json={'data': {'id': '1234567890'}})
    with tempfile.TemporaryFile() as attachment:
        created = remote.publish_attachment(
            {'something': 'somevalue'}, ('file', attachment))
    assert remote._changes
    assert created == {'id': '1234567890'}

    update_url = (
        settings.REMOTE_SETTINGS_WRITER_URL +
        'buckets/foo/collections/baa/records/an-id/attachment')
    responses.add(responses.POST, update_url, json={'data': {'id': 'an-id'}})
    with tempfile.TemporaryFile() as attachment:
        updated = remote.publish_attachment(
            {'something': 'somevalue'}, ('otherfile', attachment), 'an-id')
    assert updated == {'id': 'an-id'}
def legacy_delete_blocks(blocks):
    """Delete the legacy Remote Settings records backing ``blocks``.

    Blocks without a ``legacy_id`` are ignored, and blocks imported from a
    legacy regex guid are skipped (they can't be deleted safely). Deleted
    blocks get their ``legacy_id`` cleared; the session is completed once at
    the end.
    """
    server = RemoteSettings(
        settings.REMOTE_SETTINGS_WRITER_BUCKET,
        REMOTE_SETTINGS_COLLECTION_LEGACY)
    for block in blocks:
        if not block.legacy_id:
            continue
        if block.is_imported_from_legacy_regex:
            log.info(
                f'Block [{block.guid}] was imported from a regex guid so '
                "can't be safely deleted. Skipping.")
            continue
        server.delete_record(block.legacy_id)
        block.update(legacy_id='')
    server.complete_session()
def test_setup_not_test_server(self):
    """Outside the test-server flow setup() returns immediately and keeps
    the configured bucket name unchanged."""
    remote = RemoteSettings('foo', 'baa')
    remote.setup()  # will just return
    assert remote._setup_done
    assert remote.bucket == 'foo'