def test__write_only_modified(self, key_mock):
    """Test that only files whose contents changed are re-uploaded.

    Writes two files, uploads them, then rewrites one file and checks
    that only that file produces an additional ``put_object`` call when
    FLASKS3_ONLY_MODIFIED is enabled.
    """
    self.app.config['FLASKS3_ONLY_MODIFIED'] = True
    static_folder = tempfile.mkdtemp()
    static_url_loc = static_folder
    filenames = [os.path.join(static_folder, f)
                 for f in ['foo.css', 'bar.css']]
    expected = []

    data_iter = count()

    for filename in filenames:
        # Write a unique value into each file so their hashes differ.
        # BUG FIX: the old PY3 branch did ``str(data_iter)`` -- the repr
        # of the count object -- instead of advancing the iterator, so
        # every file got identical content. ``next()`` works on both
        # Python 2 and 3, replacing the divergent ``.next()`` branch.
        data = str(next(data_iter))
        with open(filename, 'wb') as f:
            if six.PY3:
                f.write(data.encode())
            else:
                f.write(data)

        # On the first pass every file is expected to be uploaded.
        expected.append(
            call.put_object(ACL='public-read',
                            Bucket=None,
                            Key=filename.lstrip("/"),
                            Body=data,
                            Metadata={},
                            Expires='Thu, 31 Dec 2037 23:59:59 GMT',
                            ContentEncoding='gzip'))

    files = {(static_url_loc, static_folder): filenames}

    hashes = flask_s3._upload_files(key_mock, self.app, files, None)

    # All files are uploaded and hashes are returned
    self.assertLessEqual(len(expected), len(key_mock.mock_calls))
    self.assertEqual(len(hashes), len(filenames))

    # Modify the second file only.
    data = str(next(data_iter))
    with open(filenames[1], 'wb') as f:
        if six.PY2:
            f.write(data)
        else:
            f.write(data.encode())

    # We expect only this file to be uploaded on the second pass.
    expected.append(
        call.put_object(ACL='public-read',
                        Bucket=None,
                        Key=filenames[1].lstrip("/"),
                        Body=data,
                        Metadata={},
                        Expires='Thu, 31 Dec 2037 23:59:59 GMT',
                        ContentEncoding='gzip'))

    flask_s3._upload_files(key_mock, self.app, files, None,
                           hashes=dict(hashes))

    self.assertEqual(len(expected), len(key_mock.mock_calls))
def test_write_endfile(self):
    """write_endfile drops an end-of-analysis marker object into the bucket."""
    fake_bucket = make_mock_bucket([])
    with patch.object(s3.s3_resource, "Bucket",
                      return_value=fake_bucket) as bucket_factory:
        s3.write_endfile("bucketname", "resultpath")
        expected_factory_calls = [call('bucketname')]
        expected_bucket_calls = [
            call.put_object(Body=b'end of analysis marker',
                            Key='resultpath/process_results/end.txt')
        ]
        assert bucket_factory.mock_calls == expected_factory_calls
        assert bucket_factory.return_value.mock_calls == expected_bucket_calls
def test_write_active_monitorlog(self):
    """write_active_monitorlog serializes the log dict under logs/active/."""
    fake_bucket = make_mock_bucket([])
    with patch.object(s3.s3_resource, "Bucket",
                      return_value=fake_bucket) as bucket_factory:
        s3.write_active_monitorlog("bucketname", "resultpath",
                                   {"key": "value"})
        assert bucket_factory.mock_calls == [call('bucketname')]
        # Diagnostic output kept for debugging failed runs.
        print((bucket_factory.return_value.mock_calls))
        expected_bucket_calls = [
            call.put_object(Body=b'{\n "key": "value"\n}',
                            Key='logs/active/resultpath')
        ]
        assert bucket_factory.return_value.mock_calls == expected_bucket_calls
def test_update_monitorlog_start(self):
    """update_monitorlog rewrites the active log with the new end time."""
    timestamp = "t0"
    initial_log = {"start": "init_val", "end": "init_val"}
    instance_id = "i-12345"
    yaml_contents = return_yaml_read()
    fake_bucket = make_mock_bucket([])
    fake_file_object = make_mock_file_object(yaml_contents)
    fake_bucket.put_object.return_value = fake_file_object
    with patch.object(s3.s3_resource, "Bucket",
                      return_value=fake_bucket) as bucket_factory:
        # load_json is patched so the code under test sees our initial log
        # instead of reading from (mocked) S3.
        with patch('ncap_iac.protocols.utilsparam.s3.load_json',
                   return_value=initial_log):
            expected_log = {"start": "init_val", "end": timestamp}
            s3.update_monitorlog("bucketname", instance_id,
                                 "shutting-down", timestamp)
            # Diagnostic output kept for debugging failed runs.
            print((bucket_factory.return_value.put_object.mock_calls))
            assert bucket_factory.mock_calls == [call('bucketname')]
            expected_bucket_calls = [
                call.put_object(
                    Body=bytes(
                        json.dumps(expected_log, indent=2).encode('UTF-8')),
                    Key=os.path.join("logs", "active", instance_id))
            ]
            assert bucket_factory.return_value.mock_calls == expected_bucket_calls