def legacy_publish_blocks(blocks):
    bucket = settings.REMOTE_SETTINGS_WRITER_BUCKET
    server = RemoteSettings(bucket, REMOTE_SETTINGS_COLLECTION_LEGACY)

    for block in blocks:
        needs_creating = not block.legacy_id
        if block.is_imported_from_legacy_regex:
            log.info(
                f'Block [{block.guid}] was imported from a regex guid so '
                "can't be safely updated. Skipping.")
            continue
        data = {
            'guid': block.guid,
            'details': {
                'bug': block.url,
                'why': block.reason,
                'name': str(block.reason).partition('.')[0],  # required
            },
            'enabled': True,
            'versionRange': [{
                'severity': 3,  # Always high severity now.
                'minVersion': block.min_version,
                'maxVersion': block.max_version,
            }],
        }
        if needs_creating:
            record = server.publish_record(data)
            block.update(legacy_id=record.get('id', ''))
        else:
            server.publish_record(data, block.legacy_id)

    server.complete_session()
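
# For context: a minimal sketch of the create-vs-update behaviour that
# publish_record() above relies on, written directly against the standard
# Kinto HTTP records endpoint. This illustrates the assumed semantics only,
# not the wrapper's actual implementation; `session` is any authenticated
# requests.Session and publish_record_sketch is an invented name.
def publish_record_sketch(session, base_url, bucket, collection, data,
                          legacy_id=None):
    records_url = (
        f'{base_url}buckets/{bucket}/collections/{collection}/records')
    if legacy_id is None:
        # No id yet: POST and let the server generate one.
        response = session.post(records_url, json={'data': data})
    else:
        # Known id: PUT creates-or-replaces that specific record.
        response = session.put(
            f'{records_url}/{legacy_id}', json={'data': data})
    response.raise_for_status()
    return response.json().get('data', {})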
def legacy_delete_blocks(blocks):
    bucket = settings.REMOTE_SETTINGS_WRITER_BUCKET
    server = RemoteSettings(bucket, REMOTE_SETTINGS_COLLECTION_LEGACY)

    for block in blocks:
        if block.legacy_id:
            if block.is_imported_from_legacy_regex:
                log.info(
                    f'Block [{block.guid}] was imported from a regex guid so '
                    "can't be safely deleted. Skipping.")
            else:
                server.delete_record(block.legacy_id)
            block.update(legacy_id='')

    server.complete_session()
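
# Both legacy_* tasks above depend on the same small surface of the
# RemoteSettings wrapper. A hypothetical Protocol capturing that assumed
# interface (the real wrapper has more methods and may differ in
# signatures):
from typing import Optional, Protocol


class RemoteSettingsClient(Protocol):
    def publish_record(self, data: dict,
                       legacy_id: Optional[str] = None) -> dict:
        """POST a new record, or PUT to the record with the given
        legacy_id; returns the stored record, including its 'id'."""

    def delete_record(self, legacy_id: str) -> None:
        """DELETE the record with the given id."""

    def complete_session(self) -> None:
        """Flush the batch and flip the collection status so the changes
        get signed - called once at the end, never per record."""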
def test_complete_session_no_signoff(self):
    server = RemoteSettings('foo', 'baa', sign_off_needed=False)
    server._setup_done = True
    # should return early because there's nothing to sign off
    server.complete_session()

    server._changes = True
    url = settings.REMOTE_SETTINGS_WRITER_URL + 'buckets/foo/collections/baa'
    responses.add(responses.PATCH, url, content_type='application/json')

    server.complete_session()
    assert not server._changes
    assert (
        responses.calls[0].request.body ==
        json.dumps({'data': {'status': 'to-sign'}}).encode())
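
# A hypothetical companion test for the sign-off path, assuming the
# wrapper PATCHes status 'to-review' (request review rather than
# auto-sign) when sign_off_needed is True; adjust to the wrapper's real
# behaviour if it differs:
def test_complete_session_with_signoff(self):
    server = RemoteSettings('foo', 'baa', sign_off_needed=True)
    server._setup_done = True
    server._changes = True
    url = settings.REMOTE_SETTINGS_WRITER_URL + 'buckets/foo/collections/baa'
    responses.add(responses.PATCH, url, content_type='application/json')

    server.complete_session()
    assert not server._changes
    assert (
        responses.calls[0].request.body ==
        json.dumps({'data': {'status': 'to-review'}}).encode())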
def upload_filter(generation_time, is_base=True, upload_stash=False):
    bucket = settings.REMOTE_SETTINGS_WRITER_BUCKET
    server = RemoteSettings(
        bucket, REMOTE_SETTINGS_COLLECTION_MLBF, sign_off_needed=False)
    mlbf = MLBF(generation_time)
    if is_base:
        # clear the collection for the base - we want to be the only filter
        server.delete_all_records()
        statsd.incr('blocklist.tasks.upload_filter.reset_collection')
    # Deal with possible stashes first
    if upload_stash:
        # If we have a stash, write that
        stash_data = {
            'key_format': MLBF.KEY_FORMAT,
            'stash_time': generation_time,
            'stash': mlbf.stash_json,
        }
        server.publish_record(stash_data)
        statsd.incr('blocklist.tasks.upload_filter.upload_stash')
    # Then the bloomfilter
    data = {
        'key_format': MLBF.KEY_FORMAT,
        'generation_time': generation_time,
        'attachment_type': (
            BLOCKLIST_RECORD_MLBF_BASE if is_base
            else BLOCKLIST_RECORD_MLBF_UPDATE),
    }
    with storage.open(mlbf.filter_path, 'rb') as filter_file:
        attachment = ('filter.bin', filter_file, 'application/octet-stream')
        server.publish_attachment(data, attachment)
        statsd.incr('blocklist.tasks.upload_filter.upload_mlbf')
        statsd.incr(
            'blocklist.tasks.upload_filter.upload_mlbf.'
            f'{"base" if is_base else "full"}')

    server.complete_session()
    set_config(MLBF_TIME_CONFIG_KEY, generation_time, json_value=True)
    if is_base:
        set_config(MLBF_BASE_ID_CONFIG_KEY, generation_time, json_value=True)
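
# A hypothetical sketch of how a caller might choose between publishing a
# fresh base filter and a stash on top of the current base, using the two
# config keys written above. publish_filter_sketch is invented for
# illustration, and get_config is assumed to mirror set_config; the real
# scheduling and reset criteria live in the surrounding cron/task code.
import time


def publish_filter_sketch(force_base=False):
    generation_time = int(time.time() * 1000)
    base_id = get_config(MLBF_BASE_ID_CONFIG_KEY, json_value=True)
    if force_base or base_id is None:
        # No base yet (or a reset was requested): publish a fresh base,
        # which also clears any previously uploaded stashes.
        upload_filter(generation_time, is_base=True)
    else:
        # A base exists: publish an update filter plus a stash of changes.
        upload_filter(generation_time, is_base=False, upload_stash=True)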