def test_create_job_login_required():
    """A 401 from the jobs endpoint surfaces as LoginRequiredError."""
    # NOTE(review): a later definition in this file reuses this test name, so
    # pytest only collects that later copy; this one never runs. Consider
    # deleting one of the duplicates.
    with requests_mock.mock() as m:
        m.post('https://databasin.org/api/v1/jobs/', status_code=401)
        client = Client()
        with pytest.raises(LoginRequiredError):
            client.create_job('foo_job')
def test_import_netcdf_dataset_incomplete(import_job_data, tmp_file_data, dataset_import_data):
    """An import that stops at the review UI raises DatasetImportError and is cancelled."""
    # NOTE(review): a later definition in this file reuses this test name, so
    # pytest only collects that later copy; this one never runs.
    import_job_data = copy.copy(import_job_data)
    import_job_data['message'] = json.dumps({'next_uri': '/datasets/import/a1b2c3/overview/'})
    with requests_mock.mock() as m:
        m.post('https://databasin.org/uploads/upload-temporary-file/', text=json.dumps({'uuid': 'abcd'}))
        m.get('https://databasin.org/api/v1/uploads/temporary-files/abcd/', text=json.dumps(tmp_file_data))
        m.post('https://databasin.org/api/v1/jobs/', headers={'Location': 'https://databasin.org/api/v1/jobs/1234/'})
        m.get('https://databasin.org/api/v1/jobs/1234/', text=json.dumps(import_job_data))
        m.get('https://databasin.org/api/v1/dataset_imports/a1b2c3/', text=json.dumps(dataset_import_data))
        m.delete('https://databasin.org/api/v1/dataset_imports/a1b2c3/')

        # Build an in-memory zip containing both the data file and a style.
        archive = six.BytesIO()
        with zipfile.ZipFile(archive, 'w') as zf:
            zf.writestr('test.nc', '')
            zf.writestr('style.json', '')
        archive.seek(0)

        with mock.patch.object(builtins, 'open', mock.Mock(return_value=archive)) as open_mock:
            client = Client()
            client._session.cookies['csrftoken'] = 'abcd'
            with pytest.raises(DatasetImportError):
                client.import_netcdf_dataset('test.zip')
            # Upload, tmp-file fetch, job post, job poll, import fetch, cancel.
            assert m.call_count == 6
def test_datasets_pagination(dataset_data):
    """list_datasets transparently follows the paginated dataset listing."""
    # NOTE(review): a later definition in this file reuses this test name, so
    # pytest only collects that later copy; this one never runs.
    with requests_mock.mock() as m:
        first_page = {
            'meta': {'next': '/api/v1/datasets/?limit=2&offset=2', 'total_count': 3},
            'objects': [dataset_data, copy.copy(dataset_data)],
        }
        first_page['objects'][1]['id'] = 'a1b2c4'
        second_page = {
            'meta': {'next': None, 'total_count': 3},
            'objects': [copy.copy(dataset_data)],
        }
        second_page['objects'][0]['id'] = 'a1b2c5'
        m.get('https://databasin.org/api/v1/datasets/', text=json.dumps(first_page))
        m.get('https://databasin.org/api/v1/datasets/?offset=2', text=json.dumps(second_page))

        client = Client()
        datasets = client.list_datasets()
        # Length is known from meta before the second page is fetched.
        assert len(datasets) == 3
        datasets = list(datasets)
        assert m.call_count == 2
        assert len(datasets) == 3
        assert datasets[0].id == 'a1b2c3'
        assert datasets[1].id == 'a1b2c4'
        assert datasets[2].id == 'a1b2c5'
def test_job_block(job_data):
    """create_job(block=True) polls until the job reports success."""
    # NOTE(review): a later definition in this file reuses this test name, so
    # pytest only collects that later copy; this one never runs.
    job_data_2 = copy.copy(job_data)
    job_data_2['status'] = 'succeeded'
    with requests_mock.mock() as m:
        m.post(
            'https://databasin.org/api/v1/jobs/',
            headers={'Location': 'https://databasin.org/api/v1/jobs/1234/'},
            status_code=201,
        )
        # Three in-progress responses followed by the terminal one.
        responses = [{'text': json.dumps(job_data)} for _ in range(3)]
        responses.append({'text': json.dumps(job_data_2)})
        m.get('https://databasin.org/api/v1/jobs/1234/', responses)

        with mock.patch('time.sleep', mock.Mock()) as time_mock:
            # Make sure the test actually stops if things don't work as planned
            time_mock.side_effect = [None, None, None, IOError]
            client = Client()
            job = client.create_job('foo', block=True)
        assert m.call_count == 5
        assert job.status == 'succeeded'
        assert time_mock.call_count == 3
def test_datasets_pagination(dataset_data):
    """list_datasets transparently follows the paginated dataset listing."""
    with requests_mock.mock() as m:
        page_one = {
            'meta': {'next': '/api/v1/datasets/?limit=2&offset=2', 'total_count': 3},
            'objects': [dataset_data, copy.copy(dataset_data)],
        }
        page_one['objects'][1]['id'] = 'a1b2c4'
        page_two = {
            'meta': {'next': None, 'total_count': 3},
            'objects': [copy.copy(dataset_data)],
        }
        page_two['objects'][0]['id'] = 'a1b2c5'
        m.get('https://databasin.org/api/v1/datasets/', text=json.dumps(page_one))
        m.get('https://databasin.org/api/v1/datasets/?offset=2', text=json.dumps(page_two))

        client = Client()
        datasets = client.list_datasets()
        # Total is available from page metadata before iterating.
        assert len(datasets) == 3
        datasets = list(datasets)
        assert m.call_count == 2
        assert len(datasets) == 3
        assert datasets[0].id == 'a1b2c3'
        assert datasets[1].id == 'a1b2c4'
        assert datasets[2].id == 'a1b2c5'
def test_job_block(job_data):
    """create_job(block=True) polls the job URL until status is 'succeeded'."""
    finished = copy.copy(job_data)
    finished["status"] = "succeeded"
    with requests_mock.mock() as m:
        m.post(
            "https://databasin.org/api/v1/jobs/",
            headers={"Location": "https://databasin.org/api/v1/jobs/1234/"},
            status_code=201,
        )
        # Three pending responses, then the terminal state.
        polls = [{"text": json.dumps(job_data)} for _ in range(3)]
        polls.append({"text": json.dumps(finished)})
        m.get("https://databasin.org/api/v1/jobs/1234/", polls)

        with mock.patch("time.sleep", mock.Mock()) as time_mock:
            # Make sure the test actually stops if things don't work as planned
            time_mock.side_effect = [None, None, None, IOError]
            client = Client()
            job = client.create_job("foo", block=True)
        assert m.call_count == 5
        assert job.status == "succeeded"
        assert time_mock.call_count == 3
def test_import_netcdf_dataset_with_zip(import_job_data, dataset_data, tmp_file_data):
    """A zip containing test.nc + style.json is uploaded and imported end-to-end."""
    with requests_mock.mock() as m:
        m.post('https://databasin.org/uploads/upload-temporary-file/', text=json.dumps({'uuid': 'abcd'}))
        m.get('https://databasin.org/api/v1/uploads/temporary-files/abcd/', text=json.dumps(tmp_file_data))
        m.post('https://databasin.org/api/v1/jobs/', headers={'Location': 'https://databasin.org/api/v1/jobs/1234/'})
        m.get('https://databasin.org/api/v1/jobs/1234/', text=json.dumps(import_job_data))
        m.get('https://databasin.org/api/v1/datasets/a1b2c3/', text=json.dumps(dataset_data))

        # In-memory zip with both the data file and a style definition.
        archive = six.BytesIO()
        with zipfile.ZipFile(archive, 'w') as zf:
            zf.writestr('test.nc', '')
            zf.writestr('style.json', '')
        archive.seek(0)

        with mock.patch.object(builtins, 'open', mock.Mock(return_value=archive)) as open_mock:
            client = Client()
            client._session.cookies['csrftoken'] = 'abcd'
            dataset = client.import_netcdf_dataset('test.zip')

        open_mock.assert_called_once_with('test.zip', 'a+b')
        assert m.call_count == 5
        assert dataset.id == 'a1b2c3'
        request_data = json.loads(m.request_history[2].text)
        assert request_data['job_name'] == 'create_import_job'
        assert request_data['job_args']['file'] == 'abcd'
        assert request_data['job_args']['dataset_type'] == 'NetCDF_Native'
def test_create_job_login_required():
    """A 401 when posting a job raises LoginRequiredError for anonymous clients."""
    with requests_mock.mock() as m:
        m.post("https://databasin.org/api/v1/jobs/", status_code=401)
        client = Client()
        with pytest.raises(LoginRequiredError):
            client.create_job("foo_job")
def test_import_netcdf_dataset_incomplete(import_job_data, tmp_file_data, dataset_import_data):
    """An import job whose message redirects to the review UI raises
    DatasetImportError, and the incomplete import is cancelled (DELETE).

    Fix: the ``open`` patch previously captured the mock as ``open_mock``
    without ever using it; the unused binding is removed.
    """
    import_job_data = copy.copy(import_job_data)
    import_job_data['message'] = json.dumps(
        {'next_uri': '/datasets/import/a1b2c3/overview/'})
    with requests_mock.mock() as m:
        m.post('https://databasin.org/uploads/upload-temporary-file/', text=json.dumps({'uuid': 'abcd'}))
        m.get('https://databasin.org/api/v1/uploads/temporary-files/abcd/', text=json.dumps(tmp_file_data))
        m.post('https://databasin.org/api/v1/jobs/', headers={'Location': 'https://databasin.org/api/v1/jobs/1234/'})
        m.get('https://databasin.org/api/v1/jobs/1234/', text=json.dumps(import_job_data))
        m.get('https://databasin.org/api/v1/dataset_imports/a1b2c3/', text=json.dumps(dataset_import_data))
        m.delete('https://databasin.org/api/v1/dataset_imports/a1b2c3/')

        # In-memory zip with the data file and a style definition.
        f = six.BytesIO()
        with zipfile.ZipFile(f, 'w') as zf:
            zf.writestr('test.nc', '')
            zf.writestr('style.json', '')
        f.seek(0)

        with mock.patch.object(builtins, 'open', mock.Mock(return_value=f)):
            c = Client()
            c._session.cookies['csrftoken'] = 'abcd'
            with pytest.raises(DatasetImportError):
                c.import_netcdf_dataset('test.zip')
            # Upload, tmp-file fetch, job post, job poll, import fetch, cancel.
            assert m.call_count == 6
def test_get_dataset_login_required():
    """Fetching a dataset while anonymous maps a 401 to LoginRequiredError."""
    with requests_mock.mock() as m:
        m.get('https://databasin.org/api/v1/datasets/a1b2c3/', status_code=401)
        client = Client()
        with pytest.raises(LoginRequiredError):
            client.get_dataset('a1b2c3')
def test_job_join(job_data):
    """job.join() polls until the job leaves the 'queued' state."""
    # NOTE(review): a later definition in this file reuses this test name, so
    # pytest only collects that later copy; this one never runs.
    job_data_2 = copy.copy(job_data)
    job_data_2['status'] = 'succeeded'
    with requests_mock.mock() as m:
        responses = [{'text': json.dumps(job_data)} for _ in range(3)]
        responses.append({'text': json.dumps(job_data_2)})
        m.get('https://databasin.org/api/v1/jobs/1234/', responses)

        with mock.patch('time.sleep', mock.Mock()) as time_mock:
            # Make sure the test actually stops if things don't work as planned
            time_mock.side_effect = [None, None, None, IOError]
            client = Client()
            job = client.get_job('1234')
            assert job.status == 'queued'
            assert m.call_count == 1
            job.join()
        assert m.call_count == 4
        assert job.status == 'succeeded'
        assert time_mock.call_count == 3
def test_job_join(job_data):
    """job.join() re-polls the job endpoint until status becomes 'succeeded'."""
    finished = copy.copy(job_data)
    finished["status"] = "succeeded"
    with requests_mock.mock() as m:
        polls = [{"text": json.dumps(job_data)} for _ in range(3)]
        polls.append({"text": json.dumps(finished)})
        m.get("https://databasin.org/api/v1/jobs/1234/", polls)

        with mock.patch("time.sleep", mock.Mock()) as time_mock:
            # Make sure the test actually stops if things don't work as planned
            time_mock.side_effect = [None, None, None, IOError]
            client = Client()
            job = client.get_job("1234")
            assert job.status == "queued"
            assert m.call_count == 1
            job.join()
        assert m.call_count == 4
        assert job.status == "succeeded"
        assert time_mock.call_count == 3
def test_job_refresh(job_data):
    """job.refresh() re-fetches the resource and updates status/progress/message."""
    updated = copy.copy(job_data)
    updated.update({'status': 'running', 'progress': 50, 'message': 'Foo'})
    with requests_mock.mock() as m:
        m.get(
            'https://databasin.org/api/v1/jobs/1234/',
            [{'text': json.dumps(job_data)}, {'text': json.dumps(updated)}],
        )
        client = Client()
        job = client.get_job('1234')
        assert job.status == 'queued'
        assert job.progress == 0
        assert job.message is None

        job.refresh()
        assert m.call_count == 2
        assert job.status == 'running'
        assert job.progress == 50
        assert job.message == 'Foo'
def test_import_netcdf_dataset_with_nc(import_job_data, dataset_data, tmp_file_data):
    """A bare .nc plus an explicit style dict is zipped up and imported."""
    with requests_mock.mock() as m:
        m.post('https://databasin.org/uploads/upload-temporary-file/', text=json.dumps({'uuid': 'abcd'}))
        m.get('https://databasin.org/api/v1/uploads/temporary-files/abcd/', text=json.dumps(tmp_file_data))
        m.post('https://databasin.org/api/v1/jobs/', headers={'Location': 'https://databasin.org/api/v1/jobs/1234/'})
        m.get('https://databasin.org/api/v1/jobs/1234/', text=json.dumps(import_job_data))
        m.get('https://databasin.org/api/v1/datasets/a1b2c3/', text=json.dumps(dataset_data))

        # Patch ZipFile so the client's zip-building step is observable.
        with mock.patch.object(zipfile, 'ZipFile', mock.MagicMock()) as zf_mock:
            client = Client()
            client._session.cookies['csrftoken'] = 'abcd'
            dataset = client.import_netcdf_dataset('test.nc', style={'foo': 'bar'})

        zf_mock().write.assert_called_once_with('test.nc', 'test.nc')
        assert m.call_count == 5
        assert dataset.id == 'a1b2c3'
        request_data = json.loads(m.request_history[2].text)
        assert request_data['job_name'] == 'create_import_job'
        assert request_data['job_args']['file'] == 'abcd'
        assert request_data['job_args']['dataset_type'] == 'NetCDF_Native'
def test_get_temporary_file_login_required(tmp_file_data):
    """A 401 when fetching a temporary file raises LoginRequiredError.

    Fix: the return value was previously bound to an unused local
    (``tmp_file``); the dead assignment is removed (pyflakes F841).
    """
    with requests_mock.mock() as m:
        m.get('https://databasin.org/api/v1/uploads/temporary-files/1234/', status_code=401)
        c = Client()
        with pytest.raises(LoginRequiredError):
            c.get_temporary_file('1234')
def test_get_dataset_forbidden():
    """A 401 for an already-authenticated client maps to ForbiddenError."""
    with requests_mock.mock() as m:
        m.get('https://databasin.org/api/v1/datasets/a1b2c3/', status_code=401)
        client = Client()
        # A set username marks the client as logged in, so 401 -> Forbidden.
        client.username = '******'
        with pytest.raises(ForbiddenError):
            client.get_dataset('a1b2c3')
def test_create_job_forbidden():
    """A 401 on job creation maps to ForbiddenError when logged in."""
    # NOTE(review): a later definition in this file reuses this test name, so
    # pytest only collects that later copy; this one never runs.
    with requests_mock.mock() as m:
        m.post("https://databasin.org/api/v1/jobs/", status_code=401)
        client = Client()
        # A set username marks the client as logged in, so 401 -> Forbidden.
        client.username = "******"
        with pytest.raises(ForbiddenError):
            client.create_job("foo_job")
def test_create_job_forbidden():
    """For a logged-in client, a 401 on job creation raises ForbiddenError."""
    with requests_mock.mock() as m:
        m.post('https://databasin.org/api/v1/jobs/', status_code=401)
        client = Client()
        client.username = '******'
        with pytest.raises(ForbiddenError):
            client.create_job('foo_job')
def test_get_temporary_file(tmp_file_data):
    """get_temporary_file returns a resource with the requested uuid."""
    with requests_mock.mock() as m:
        m.get('https://databasin.org/api/v1/uploads/temporary-files/1234/', text=json.dumps(tmp_file_data))
        client = Client()
        tmp_file = client.get_temporary_file('1234')
        assert m.called
        assert tmp_file.uuid == '1234'
def test_temporary_file_upload_login_required():
    """A 401 on the upload endpoint raises LoginRequiredError."""
    with requests_mock.mock() as m:
        m.post('https://databasin.org/uploads/upload-temporary-file/', status_code=401)
        client = Client()
        # The upload view requires a CSRF token cookie.
        client._session.cookies['csrftoken'] = 'abcd'
        with pytest.raises(LoginRequiredError):
            client.upload_temporary_file(StringIO('foo'))
def test_login():
    """login() fetches a CSRF token then posts credentials to LOGIN_URL."""
    with requests_mock.mock() as m:
        m.get('https://databasin.org/', cookies={'csrftoken': 'abcd'})
        m.post(LOGIN_URL, cookies={'sessionid': 'asdf'})
        client = Client()
        client.login('foo', 'bar')
        # One GET for the token, one POST for the credentials.
        assert m.call_count == 2
def test_dataset_import_cancel_login_required(dataset_import_data):
    """A 401 on the cancel DELETE raises LoginRequiredError."""
    with requests_mock.mock() as m:
        m.get('https://databasin.org/api/v1/dataset_imports/a1b2c3/', text=json.dumps(dataset_import_data))
        m.delete('https://databasin.org/api/v1/dataset_imports/a1b2c3/', status_code=401)
        client = Client()
        dataset_import = client.get_import('a1b2c3')
        with pytest.raises(LoginRequiredError):
            dataset_import.cancel()
def test_get_dataset_import(dataset_import_data):
    """get_import returns the dataset-import resource with the requested id."""
    with requests_mock.mock() as m:
        m.get('https://databasin.org/api/v1/dataset_imports/a1b2c3/', text=json.dumps(dataset_import_data))
        client = Client()
        dataset_import = client.get_import('a1b2c3')
        assert m.called
        assert dataset_import.id == 'a1b2c3'
def test_get_job(job_data):
    """get_job fetches the job resource and exposes its id."""
    # NOTE(review): a later definition in this file reuses this test name, so
    # pytest only collects that later copy; this one never runs.
    with requests_mock.mock() as m:
        m.get("https://databasin.org/api/v1/jobs/1234/", text=json.dumps(job_data))
        client = Client()
        job = client.get_job("1234")
        assert job.id == "1234"
        assert m.called
def test_dataset_import_cancel(dataset_import_data):
    """cancel() issues a DELETE against the dataset-import endpoint."""
    with requests_mock.mock() as m:
        m.get('https://databasin.org/api/v1/dataset_imports/a1b2c3/', text=json.dumps(dataset_import_data))
        m.delete('https://databasin.org/api/v1/dataset_imports/a1b2c3/', status_code=204)
        client = Client()
        dataset_import = client.get_import('a1b2c3')
        dataset_import.cancel()
        # One GET to fetch, one DELETE to cancel.
        assert m.call_count == 2
def test_get_job(job_data):
    """get_job fetches the job resource and exposes its id."""
    with requests_mock.mock() as m:
        m.get('https://databasin.org/api/v1/jobs/1234/', text=json.dumps(job_data))
        client = Client()
        job = client.get_job('1234')
        assert job.id == '1234'
        assert m.called
def test_dataset_import_cancel_forbidden(dataset_import_data):
    """A 401 on cancel for a logged-in client maps to ForbiddenError."""
    with requests_mock.mock() as m:
        m.get('https://databasin.org/api/v1/dataset_imports/a1b2c3/', text=json.dumps(dataset_import_data))
        m.delete('https://databasin.org/api/v1/dataset_imports/a1b2c3/', status_code=401)
        client = Client()
        # A set username marks the client as logged in, so 401 -> Forbidden.
        client.username = '******'
        dataset_import = client.get_import('a1b2c3')
        with pytest.raises(ForbiddenError):
            dataset_import.cancel()
def test_list_dataset_imports_with_filters():
    """list_imports serializes filter kwargs into the query string."""
    empty_listing = {'meta': {'next': None, 'total_count': 0}, 'objects': []}
    with requests_mock.mock() as m:
        m.get('https://databasin.org/api/v1/dataset_imports/?private=False', text=json.dumps(empty_listing))
        client = Client()
        client.list_imports(filters={'private': False})
        assert m.called
        # requests_mock lower-cases query values in `.qs`.
        assert m.request_history[0].qs == {'private': ['false']}
def test_get_dataset(dataset_data):
    """get_dataset returns a dataset resource with fields from the fixture."""
    with requests_mock.mock() as m:
        m.get('https://databasin.org/api/v1/datasets/a1b2c3/', text=json.dumps(dataset_data))
        client = Client()
        dataset = client.get_dataset('a1b2c3')
        # NOTE(review): this bare attribute access was in the original test —
        # presumably it forces a lazy fetch of the resource before the
        # `m.called` assertion; confirm against the resource implementation.
        dataset.id
        assert m.called
        assert dataset.tags == dataset_data['tags']
        assert dataset.credits is None
def test_dataset_make_private(dataset_data):
    """make_private() PATCHes the dataset with ``private: true``.

    Fix: ``request_data['private'] == True`` replaced with ``is True``
    (PEP 8 / E712) — json.loads yields a real bool, so the identity check is
    both idiomatic and stricter (rejects truthy non-bools like 1).
    """
    with requests_mock.mock() as m:
        m.get('https://databasin.org/api/v1/datasets/a1b2c3/', text=json.dumps(dataset_data))
        m.patch('https://databasin.org/api/v1/datasets/a1b2c3/')
        c = Client()
        dataset = c.get_dataset('a1b2c3')
        dataset.make_private()
        # One GET to fetch, one PATCH to update.
        assert m.call_count == 2
        request_data = json.loads(m.request_history[1].text)
        assert request_data['private'] is True
def test_datasets_with_filter():
    """list_datasets serializes the filter dict into the query string."""
    # NOTE(review): a later definition in this file reuses this test name, so
    # pytest only collects that later copy; this one never runs.
    with requests_mock.mock() as m:
        empty_listing = {'meta': {'next': None, 'total_count': 0}, 'objects': []}
        m.get('https://databasin.org/api/v1/datasets/?private=False', text=json.dumps(empty_listing))
        client = Client()
        datasets = client.list_datasets({'private': False})
        # Force the lazy listing to issue the request.
        list(datasets)
        assert m.called
        assert m.request_history[0].qs['private'] == ['false']
def test_temporary_file_upload(tmp_file_data):
    """Uploading a file-like object returns the created temporary-file resource."""
    with requests_mock.mock() as m:
        m.post('https://databasin.org/uploads/upload-temporary-file/', text=json.dumps({'uuid': '1234'}))
        m.get('https://databasin.org/api/v1/uploads/temporary-files/1234/', text=json.dumps(tmp_file_data))
        client = Client()
        client._session.cookies['csrftoken'] = 'abcd'
        tmp_file = client.upload_temporary_file(StringIO('foo'))
        assert tmp_file.uuid == '1234'
        # An anonymous file-like has no name to report.
        assert tmp_file.filename == ''
        assert m.call_count == 2
def test_import_netcdf_dataset_with_no_style():
    """A zip lacking style.json raises ValueError before any network activity.

    Fix: the ``open`` patch previously captured the mock as ``open_mock``
    without ever using it; the unused binding is removed (pyflakes F841).
    """
    # In-memory zip containing only the data file — no style.json.
    f = six.BytesIO()
    with zipfile.ZipFile(f, 'w') as zf:
        zf.writestr('test.nc', '')
    f.seek(0)
    with mock.patch.object(builtins, 'open', mock.Mock(return_value=f)):
        c = Client()
        c._session.cookies['csrftoken'] = 'abcd'
        with pytest.raises(ValueError):
            c.import_netcdf_dataset('test.zip')
def test_temporary_file_upload_from_path(tmp_file_data):
    """Uploading by path opens the file in binary mode and sends its basename."""
    with requests_mock.mock() as m:
        m.post('https://databasin.org/uploads/upload-temporary-file/', text=json.dumps({'uuid': '1234'}))
        m.get('https://databasin.org/api/v1/uploads/temporary-files/1234/', text=json.dumps(tmp_file_data))
        with mock.patch.object(builtins, 'open', mock.mock_open(read_data='foo')) as open_mock:
            client = Client()
            client._session.cookies['csrftoken'] = 'abcd'
            tmp_file = client.upload_temporary_file('/path/to/foo.txt')
        assert tmp_file.uuid == '1234'
        open_mock.assert_called_once_with('/path/to/foo.txt', 'rb')
        # The multipart body advertises the file's basename.
        assert 'filename="foo.txt"' in str(m.request_history[0].body)
def test_my_datasets():
    """list_my_datasets filters the listing by the logged-in username."""
    # NOTE(review): a later definition in this file reuses this test name, so
    # pytest only collects that later copy; this one never runs.
    with requests_mock.mock() as m:
        empty_listing = {'meta': {'next': None, 'total_count': 0}, 'objects': []}
        m.get('https://databasin.org/api/v1/datasets/?owner_id=foo', text=json.dumps(empty_listing))
        client = Client()
        client.username = '******'
        datasets = client.list_my_datasets()
        # Force the lazy listing to issue the request.
        list(datasets)
        assert m.called
        assert m.request_history[0].qs['owner_id'] == ['foo']
def test_datasets_with_filter():
    """list_datasets serializes the filter dict into the query string."""
    with requests_mock.mock() as m:
        listing = {'meta': {'next': None, 'total_count': 0}, 'objects': []}
        m.get('https://databasin.org/api/v1/datasets/?private=False', text=json.dumps(listing))
        client = Client()
        results = client.list_datasets({'private': False})
        # Force the lazy listing to issue the request.
        list(results)
        assert m.called
        assert m.request_history[0].qs['private'] == ['false']
def test_login_no_redirect():
    """A 302 from the login POST is not followed to its Location."""
    # NOTE(review): a later definition in this file reuses this test name, so
    # pytest only collects that later copy; this one never runs.
    with requests_mock.mock() as m:
        m.get('https://databasin.org/redirect/')
        m.get('https://databasin.org/', cookies={'csrftoken': 'abcd'})
        m.get(LOGIN_URL, cookies={'csrftoken': 'abcd'})
        m.post(
            LOGIN_URL,
            headers={'Location': 'https://databasin.org/'},
            cookies={'sessionid': 'asdf'},
            status_code=302,
        )
        client = Client()
        client.login('foo', 'bar')
        # Only the token GET and the credentials POST — no redirect follow.
        assert m.call_count == 2
        assert all(r.url != 'https://databasin.org/redirect/' for r in m.request_history)
def test_my_datasets():
    """list_my_datasets filters the dataset listing by owner_id."""
    with requests_mock.mock() as m:
        listing = {'meta': {'next': None, 'total_count': 0}, 'objects': []}
        m.get('https://databasin.org/api/v1/datasets/?owner_id=foo', text=json.dumps(listing))
        client = Client()
        client.username = '******'
        results = client.list_my_datasets()
        # Force the lazy listing to issue the request.
        list(results)
        assert m.called
        assert m.request_history[0].qs['owner_id'] == ['foo']
def test_create_job(job_data):
    """create_job POSTs name+args and follows the Location header to the job."""
    # NOTE(review): a later definition in this file reuses this test name, so
    # pytest only collects that later copy; this one never runs.
    with requests_mock.mock() as m:
        m.post(
            'https://databasin.org/api/v1/jobs/',
            headers={'Location': 'https://databasin.org/api/v1/jobs/1234/'},
            status_code=201,
        )
        m.get('https://databasin.org/api/v1/jobs/1234/', text=json.dumps(job_data))
        client = Client()
        job = client.create_job('foo_job', job_args={'foo': 'bar'})
        assert job.id == '1234'
        assert m.call_count == 2
        request_data = json.loads(m.request_history[0].text)
        assert request_data['job_name'] == 'foo_job'
        assert request_data['job_args'] == {'foo': 'bar'}
def test_login_no_redirect():
    """A 302 response to the login POST is not followed to its Location."""
    with requests_mock.mock() as m:
        m.get('https://databasin.org/redirect/')
        m.get('https://databasin.org/', cookies={'csrftoken': 'abcd'})
        m.get(LOGIN_URL, cookies={'csrftoken': 'abcd'})
        m.post(
            LOGIN_URL,
            headers={'Location': 'https://databasin.org/'},
            cookies={'sessionid': 'asdf'},
            status_code=302,
        )
        client = Client()
        client.login('foo', 'bar')
        # Only the token GET and the credentials POST — no redirect follow.
        assert m.call_count == 2
        assert all(r.url != 'https://databasin.org/redirect/' for r in m.request_history)
def test_create_job(job_data):
    """create_job POSTs the job payload and follows Location to the new job."""
    with requests_mock.mock() as m:
        m.post(
            "https://databasin.org/api/v1/jobs/",
            headers={"Location": "https://databasin.org/api/v1/jobs/1234/"},
            status_code=201,
        )
        m.get("https://databasin.org/api/v1/jobs/1234/", text=json.dumps(job_data))
        client = Client()
        job = client.create_job("foo_job", job_args={"foo": "bar"})
        assert job.id == "1234"
        assert m.call_count == 2
        posted = json.loads(m.request_history[0].text)
        assert posted["job_name"] == "foo_job"
        assert posted["job_args"] == {"foo": "bar"}
def test_list_datasets(dataset_data):
    """A single-page dataset listing yields every object with its id intact."""
    with requests_mock.mock() as m:
        listing = {
            'meta': {'next': None, 'total_count': 2},
            'objects': [dataset_data, copy.copy(dataset_data)],
        }
        listing['objects'][1]['id'] = 'a1b2c4'
        m.get('https://databasin.org/api/v1/datasets/', text=json.dumps(listing))
        client = Client()
        datasets = client.list_datasets()
        # Length comes from meta.total_count before materializing.
        assert len(datasets) == 2
        datasets = list(datasets)
        assert len(datasets) == 2
        assert datasets[0].id == 'a1b2c3'
        assert datasets[1].id == 'a1b2c4'