def test_gzip_and_send_s3(self):
    """Test that a gzip archive is built, pushed to S3, and files are cleaned up.

    Uses moto's ``mock_s3`` so no real AWS calls are made. Fails if the
    uploaded ``dashboard.tar.gz`` object is missing or empty.
    """
    # First create some dummy content to work with.
    output_path = '{0}/test_out/'.format(os.getcwd())
    helper_extract_all(cluster=self.cluster, output_path=output_path)

    try:
        with mock_s3():
            s3_resource = boto3.resource('s3')
            s3_resource.create_bucket(Bucket=self.s3_details['bucket'])

            # Run the gzip and send.
            dashboard.push_to_s3(
                input_directory=output_path,
                s3_details=self.s3_details
            )

            # Fetch the uploaded object; a missing key raises ClientError,
            # which fails the test with a traceback. Assert on ContentLength
            # rather than len(get().keys()) > 0 -- the GET response dict is
            # never empty, so that check could not fail.
            s3_object = s3_resource.Object(
                self.s3_details['bucket'],
                'dashboard.tar.gz'
            )
            self.assertGreater(s3_object.get()['ContentLength'], 0)
    finally:
        # Clean up files even when an assertion above fails, so a failing
        # run does not leave test_out/ behind and poison the next run.
        shutil.rmtree(output_path)
def test_gzip_and_send_s3(self):
    """Test that push_to_s3 reports success and leaves exactly one gzip on disk.

    Stubs the backend with ``MockElasticsearch`` returning a 200 response,
    then checks the response status and that one ``.gz`` file was produced
    in the output directory.
    """
    # NOTE(review): another method in this class appears to share this exact
    # name; in Python the later definition shadows the earlier one, so only
    # one of the two tests actually runs -- confirm and rename one of them.

    # First create some dummy content to work with.
    output_path = '{0}/test_out/'.format(os.getcwd())
    helper_extract_all(cluster=self.cluster, output_path=output_path)

    try:
        stub_response = dict(
            status_code=200,
            response={'msg': 'success'}
        )
        # Run the gzip and send against the stubbed backend. No except
        # clause here: wrapping in `except Exception: self.fail(error)`
        # would discard the original traceback, while unittest already
        # reports any unexpected exception as an error with full context.
        with MockElasticsearch(stub_response):
            response = dashboard.push_to_s3(
                input_directory=output_path,
                s3_details=self.s3_details
            )

        # Check the response from S3.
        self.assertEqual(response.status_code, 200)

        # Check there is exactly one gzip on disk; assertEqual gives a
        # useful "0 != 1" message where assertTrue(len(gb) == 1) would not.
        gb = glob.glob('{0}/*.gz'.format(output_path))
        self.assertEqual(len(gb), 1)
    finally:
        # Clean up files regardless of test outcome.
        shutil.rmtree(output_path)