def get_s3_store(self):
    """Build a TelemetryBotoS3CrashStorage wired to the telemetry-crashes bucket."""
    store_config = get_config(
        cls=TelemetryBotoS3CrashStorage,
        values_source={
            'resource_class': S3ConnectionContext,
            'bucket_name': 'telemetry-crashes',
        },
    )
    return TelemetryBotoS3CrashStorage(store_config)
def test_upload_worked(self, boto_helper):
    """Uploading the schema succeeds and logs a success message."""
    boto_helper.get_or_create_bucket('crashstats')
    app = UploadTelemetrySchema(get_config(UploadTelemetrySchema))
    assert app.main() == 0
    app.config.logger.info.assert_called_once_with('Success: Schema uploaded!')
def get_s3_store(self):
    """Return a TelemetryCrashData store pointed at the telemetry bucket."""
    overrides = {'bucket_name': 'telemetry-bucket'}
    store_config = get_config(cls=TelemetryCrashData, values_source=overrides)
    return TelemetryCrashData(store_config)
def test_bucket_not_found(self, caplogpp):
    """A missing bucket is reported to the user with a non-zero exit code."""
    app = UploadTelemetrySchema(get_config(UploadTelemetrySchema))
    assert app.main() == 1
    messages = [record.message for record in caplogpp.records]
    assert "Failure: The crashstats S3 bucket must be created first." in messages
def test_upload_worked(self, boto_helper, caplogpp):
    """With the bucket present, the upload succeeds and logs success."""
    caplogpp.set_level('DEBUG')
    boto_helper.get_or_create_bucket('crashstats')
    app = UploadTelemetrySchema(get_config(UploadTelemetrySchema))
    assert app.main() == 0
    messages = [record.message for record in caplogpp.records]
    assert 'Success: Schema uploaded!' in messages
def test_bucket_not_found(self, caplogpp):
    """A missing bucket is reported to the user with a non-zero exit code."""
    app = UploadTelemetrySchema(get_config(UploadTelemetrySchema))
    assert app.main() == 1
    messages = [record.message for record in caplogpp.records]
    assert 'Failure: The crashstats S3 bucket must be created first.' in messages
def test_upload_worked(self, boto_helper, caplogpp):
    """With the bucket present, the upload succeeds and logs success."""
    caplogpp.set_level("DEBUG")
    boto_helper.get_or_create_bucket("crashstats")
    app = UploadTelemetrySchema(get_config(UploadTelemetrySchema))
    assert app.main() == 0
    messages = [record.message for record in caplogpp.records]
    assert "Success: Schema uploaded!" in messages
def get_s3_store(self):
    """Construct a TelemetryBotoS3CrashStorage against the telemetry-crashes bucket."""
    overrides = {
        "resource_class": S3ConnectionContext,
        "bucket_name": "telemetry-crashes",
    }
    return TelemetryBotoS3CrashStorage(
        get_config(cls=TelemetryBotoS3CrashStorage, values_source=overrides)
    )
def get_s3_store(self):
    """Construct a TelemetryBotoS3CrashStorage against the telemetry-crashes bucket."""
    source = {
        'resource_class': S3ConnectionContext,
        'bucket_name': 'telemetry-crashes',
    }
    store_config = get_config(cls=TelemetryBotoS3CrashStorage, values_source=source)
    return TelemetryBotoS3CrashStorage(store_config)
def test_bucket_not_found(self):
    """A missing bucket makes the app exit non-zero and log an error."""
    app = UploadTelemetrySchema(get_config(UploadTelemetrySchema))
    assert app.main() == 1
    app.config.logger.error.assert_called_once_with(
        'Failure: The %s S3 bucket must be created first.', 'crashstats'
    )
def test_load_file_doesnt_exist(self, boto_helper):
    """Loading a key that was never uploaded raises KeyNotFound."""
    conn = S3Connection(get_config(cls=S3Connection))
    boto_helper.create_bucket(conn.config.bucket_name)
    with pytest.raises(KeyNotFound):
        conn.load_file("/test/testfile.txt")
def test_load_file(self, boto_helper):
    """Test loading a file that exists returns its exact contents.

    NOTE: the original docstring was copy-pasted from the
    "doesn't exist" test; this test uploads the file first.
    """
    config = get_config(cls=S3Connection)
    conn = S3Connection(config)
    bucket = conn.config.bucket_name
    path = "/test/testfile.txt"
    file_data = b"test file contents"
    boto_helper.create_bucket(bucket)
    boto_helper.upload_fileobj(bucket, path, file_data)
    data = conn.load_file(path)
    assert data == file_data
def setup_mocked_s3_storage(tmpdir=None, storage_class=BotoS3CrashStorage, bucket_name="crash_storage", **extra):
    """Build a test instance of *storage_class*.

    Keyword arguments in *extra* override the default configuration
    values; *tmpdir*, if given, sets the temporary file-system path.
    """
    source = {
        "resource_class": S3ConnectionContext,
        "bucket_name": bucket_name,
        "prefix": "dev",
        **extra,
    }
    storage_config = get_config(cls=storage_class, values_source=source)
    if tmpdir is not None:
        storage_config.temporary_file_system_storage_path = str(tmpdir)
    return storage_class(storage_config)
def setup_mocked_s3_storage(tmpdir=None, storage_class=BotoS3CrashStorage, bucket_name='crash_storage', **extra):
    """Build a test instance of *storage_class*.

    Keyword arguments in *extra* override the default configuration
    values; *tmpdir*, if given, sets the temporary file-system path.
    """
    source = {
        'resource_class': S3ConnectionContext,
        'bucket_name': bucket_name,
        'prefix': 'dev',
        **extra,
    }
    storage_config = get_config(cls=storage_class, values_source=source)
    if tmpdir is not None:
        storage_config.temporary_file_system_storage_path = str(tmpdir)
    return storage_class(storage_config)
def setup_mocked_s3_storage(cls=S3ConnectionContext, **extra):
    """Return a *cls* instance whose network-facing pieces are all mocks.

    Keyword arguments in *extra* override the default configuration values.
    """
    source = {
        'bucket_name': 'silliness',
        'prefix': 'dev',
        'calling_format': mock.Mock(),
        **extra,
    }
    conn = cls(get_config(cls=cls, values_source=source))
    # Stub out everything that would hit the network
    conn._connect_to_endpoint = mock.Mock()
    conn._mocked_connection = conn._connect_to_endpoint.return_value
    conn._calling_format.return_value = mock.Mock()
    conn._CreateError = mock.Mock()
    conn.ResponseError = mock.Mock()
    conn._open = mock.MagicMock()
    return conn
def setup_mocked_s3_storage(cls=S3ConnectionContext, **extra):
    """Return a *cls* instance whose network-facing pieces are all mocks.

    Keyword arguments in *extra* override the default configuration values.
    """
    source = {
        "bucket_name": "silliness",
        "prefix": "dev",
        "boto_metrics_prefix": "processor.s3",
        "calling_format": mock.Mock(),
        **extra,
    }
    conn = cls(get_config(cls=cls, values_source=source))
    # Stub out everything that would hit the network
    conn._connect_to_endpoint = mock.Mock()
    conn._mocked_connection = conn._connect_to_endpoint.return_value
    conn._calling_format.return_value = mock.Mock()
    conn._CreateError = mock.Mock()
    conn.ResponseError = mock.Mock()
    conn._open = mock.MagicMock()
    return conn
def test_save_file(self, boto_helper):
    """Saving a file stores it; saving the same key again overwrites it."""
    conn = S3Connection(get_config(cls=S3Connection))
    bucket = conn.config.bucket_name
    key = "/test/testfile.txt"
    boto_helper.create_bucket(bucket)

    first_contents = b"test file contents"
    conn.save_file(key, first_contents)
    assert boto_helper.list(bucket) == ["/test/testfile.txt"]
    assert boto_helper.download_fileobj(bucket, key) == first_contents

    # Stomp on that file with a new one
    second_contents = b"test file contents 2"
    conn.save_file(key, second_contents)
    assert boto_helper.download_fileobj(bucket, key) == second_contents
def setup_mocked_s3_storage(
    tmpdir=None,
    storage_class=BotoS3CrashStorage,
    bucket_name='crash_storage',
    **extra
):
    """Build a test instance of *storage_class*.

    Keyword arguments in *extra* override the default configuration
    values; *tmpdir*, if given, sets the temporary file-system path.
    """
    defaults = {
        'resource_class': S3ConnectionContext,
        'bucket_name': bucket_name,
        'prefix': 'dev',
    }
    defaults.update(extra)
    storage_config = get_config(cls=storage_class, values_source=defaults)
    if tmpdir is not None:
        storage_config.temporary_file_system_storage_path = str(tmpdir)
    return storage_class(storage_config)
def setup_mocked_s3_storage(cls=S3ConnectionContext, **extra):
    """Return a *cls* instance whose network-facing pieces are all mocks.

    Keyword arguments in *extra* override the default configuration values.
    """
    defaults = {
        'bucket_name': 'silliness',
        'prefix': 'dev',
        'calling_format': mock.Mock(),
    }
    defaults.update(extra)
    conn_config = get_config(cls=cls, values_source=defaults)
    conn = cls(conn_config)
    # Stub out everything that would hit the network
    conn._connect_to_endpoint = mock.Mock()
    conn._mocked_connection = conn._connect_to_endpoint.return_value
    conn._calling_format.return_value = mock.Mock()
    conn._CreateError = mock.Mock()
    conn.ResponseError = mock.Mock()
    conn._open = mock.MagicMock()
    return conn
def get_s3_store(self):
    """Return a TelemetryBotoS3CrashStorage built from its default config."""
    store_config = get_config(TelemetryBotoS3CrashStorage)
    return TelemetryBotoS3CrashStorage(config=store_config)
def get_s3_store(self):
    """Return a SimplifiedCrashData store built from its default config."""
    store_config = get_config(SimplifiedCrashData)
    return SimplifiedCrashData(config=store_config)
def get_s3_store(self):
    """Return a TelemetryCrashData store built from its default config."""
    store_config = get_config(TelemetryCrashData)
    return TelemetryCrashData(config=store_config)
def get_s3_store(self, tmpdir=None):
    """Return a BotoS3CrashStorage; *tmpdir* overrides the temp-file path."""
    overrides = {}
    if tmpdir is not None:
        overrides["temporary_file_system_storage_path"] = tmpdir
    return BotoS3CrashStorage(config=get_config(BotoS3CrashStorage, overrides))
def get_s3_store(self):
    """Return a TelemetryCrashData store pointed at the telemetry bucket."""
    store_config = get_config(
        cls=TelemetryCrashData,
        values_source={"bucket_name": "telemetry-bucket"},
    )
    return TelemetryCrashData(store_config)