def test_activities_with_valid_coordinates():
    """Creating an activity with a valid 'ra,dec' coordinates string must succeed.

    The mocked POST handler also verifies that the CLI parsed the coordinates
    string into the expected JSON payload before sending it.
    """
    runner = CliRunner()
    make_successful_login(runner)

    site_uuid = str(uuid.uuid4())
    coords_ra = 2.33
    coords_dec = 4.55

    def request_callback(request, uri, response_headers):
        # Inspect the outgoing request body to check the CLI's payload building.
        payload = json.loads(request.body)
        expected_coords = {'right_ascension': coords_ra, 'declination': coords_dec}
        assert payload.get('observing_site') == site_uuid
        assert payload.get('coordinates') == expected_coords
        return [200, response_headers, json.dumps({"result": "OK"})]

    httpretty.register_uri(httpretty.POST,
                           ARCSECOND_API_URL_DEV + '/activities/',
                           body=request_callback)

    coords = f'{coords_ra},{coords_dec}'
    cli_args = ['create',
                '--observing_site', site_uuid,
                '--coordinates', coords,
                '--debug', '--test']
    result = runner.invoke(cli.activities, cli_args)

    assert result.exit_code == 0
    assert not result.exception
def test_datafiles_upload_file_create_threaded_with_callback():
    """A threaded datafile creation must invoke the progress callback.

    Uses the Python module API (not the CLI) to create a datafile and checks
    that the upload thread finishes cleanly and that the callback fired.
    """
    # Using standard CLI runner to make sure we login successfuly as in other tests.
    runner = CliRunner()
    make_successful_login(runner)

    dataset_uuid = uuid.uuid4()
    httpretty.register_uri(httpretty.POST,
                           ARCSECOND_API_URL_DEV + '/datasets/' + str(dataset_uuid) + '/datafiles/',
                           status=201,
                           body='{"result": "OK"}')

    # Collect callback invocations in a local list instead of a module-level
    # global: the old `global has_callback_been_called` flag leaked across
    # tests (a previous test setting it could make this assertion pass even
    # if the callback never fired here), and was a NameError if it never ran.
    callback_calls = []

    def upload_callback(eventName, progress):
        print(eventName, progress, flush=True)
        callback_calls.append((eventName, progress))

    fixtures_folder = os.path.join(os.path.dirname(os.path.dirname(__file__)), 'fixtures')

    # Go for Python module tests
    datafiles_api = ArcsecondAPI.datafiles(dataset=str(dataset_uuid), debug=True, test=True)
    payload = {'file': os.path.join(fixtures_folder, 'file1.fits')}
    uploader, _ = datafiles_api.create(payload, callback=upload_callback)
    uploader.start()
    # Wait for the upload thread; sleep briefly to avoid a hot busy-wait.
    while uploader.is_alive():
        time.sleep(0.01)
    print(f'is alive? {uploader.is_alive()}', flush=True)
    results, error = uploader.finish()

    assert results is not None
    assert error is None
    assert len(callback_calls) > 0
def test_organisation_GET_datasets_list_valid_role(self):
    """As a SAAO member, I must be able to access the list of datasets."""
    runner = CliRunner()
    make_successful_login(runner, 'saao', 'member')
    mock_http_get('/saao/datasets/', '[]')
    cli_args = ['--organisation', 'saao', '--debug', '--test']
    result = runner.invoke(cli.datasets, cli_args)
    assert result.exit_code == 0
    assert not result.exception
def test_datasets_list_unlogged(self):
    """As a simple user, I must not be able to access the list of datasets of an organisation."""
    runner = CliRunner()
    # Plain login: no organisation membership is set up.
    make_successful_login(runner)
    cli_args = ['--organisation', 'saao', '--debug', '--test']
    result = runner.invoke(cli.datasets, cli_args)
    assert result.exit_code != 0
    assert isinstance(result.exception, ArcsecondError)
def test_organisation_POST_datasets_list_valid_member_role(self):
    """As a SAAO superadmin, I must be able to create a dataset."""
    # NOTE(review): docstring says "superadmin" but the login role is 'member'
    # (as in the function name) — confirm which role this test targets.
    runner = CliRunner()
    make_successful_login(runner, 'saao', 'member')
    mock_http_post('/saao/datasets/', '[]')
    cli_args = ['create', '--organisation', 'saao', '--debug', '--test']
    result = runner.invoke(cli.datasets, cli_args)
    assert result.exit_code == 0
    assert not result.exception
def test_organisation_GET_datasets_list_logged_but_wrong_organisation(self):
    """No matter role I have, accessing an unknown organisation must fail."""
    runner = CliRunner()
    make_successful_login(runner, 'saao', 'superadmin')
    # 'dummy' is not a known organisation, even for a superadmin.
    result = runner.invoke(cli.datasets,
                           ['--organisation', 'dummy', '--debug', '--test'])
    assert result.exit_code != 0
    assert isinstance(result.exception, ArcsecondError)
def test_activities_with_invalid_coordinates3():
    """A non-numeric right ascension in the coordinates string must be rejected."""
    runner = CliRunner()
    make_successful_login(runner)
    site_uuid = str(uuid.uuid4())
    coords_ra = 2.33
    # Malformed: first component is not a number.
    coords = f'yoyo,{coords_ra}'
    cli_args = ['create',
                '--observing_site', site_uuid,
                '--coordinates', coords,
                '--debug', '--test']
    result = runner.invoke(cli.activities, cli_args)
    assert result.exit_code != 0
    assert isinstance(result.exception, ArcsecondInputValueError)
def test_empty_datasets_list():
    """Listing datasets when the API returns an empty list must print '[]'."""
    runner = CliRunner()
    make_successful_login(runner)
    httpretty.register_uri(httpretty.GET,
                           ARCSECOND_API_URL_DEV + '/datasets/',
                           status=200,
                           body='[]')
    result = runner.invoke(cli.datasets, ['--debug', '--test'])
    assert result.exit_code == 0
    assert not result.exception
    parsed = json.loads(result.output)
    assert isinstance(parsed, list)
    assert not parsed
def test_datafiles_create_with_file():
    """Creating a datafile through the CLI with a real file path must succeed."""
    runner = CliRunner()
    make_successful_login(runner)
    dataset_uuid = uuid.uuid4()
    endpoint = ARCSECOND_API_URL_DEV + '/datasets/' + str(dataset_uuid) + '/datafiles/'
    httpretty.register_uri(httpretty.POST, endpoint, status=201, body='{"result": "OK"}')
    # Use this very test file as the upload payload: it always exists.
    cli_args = [str(dataset_uuid),
                'create',
                '--file', os.path.abspath(__file__),
                '--debug', '--test']
    result = runner.invoke(cli.datafiles, cli_args)
    assert result.exit_code == 0
    assert not result.exception
    response = json.loads(result.output)
    assert response['result'] == 'OK'
def test_datafiles_upload_file_threaded_no_callback():
    """A threaded datafile upload without a callback must finish cleanly.

    Uses the Python module API to create a datafile and waits for the upload
    thread to complete before collecting its result.
    """
    runner = CliRunner()
    make_successful_login(runner)

    dataset_uuid = uuid.uuid4()
    httpretty.register_uri(httpretty.POST,
                           ARCSECOND_API_URL_DEV + '/datasets/' + str(dataset_uuid) + '/datafiles/',
                           status=201,
                           body='{"result": "OK"}')

    # Go for Python module tests
    datafiles_api = ArcsecondAPI.datafiles(dataset=str(dataset_uuid), debug=True, test=True)
    uploader, _ = datafiles_api.create({'file': os.path.abspath(__file__)})
    uploader.start()
    # Wait until the upload thread is done, as the other threaded tests do.
    # A fixed time.sleep(0.1) was flaky: finish() could be reached while the
    # thread was still alive on a slow run.
    while uploader.is_alive():
        time.sleep(0.01)
    results, error = uploader.finish()

    assert results is not None
    assert error is None
def test_datafiles_upload_file_update_threaded_with_callback():
    """A threaded datafile update (PATCH) must invoke the progress callback.

    Uses the Python module API to update an existing datafile and checks that
    the upload thread finishes cleanly and that the callback fired.
    """
    runner = CliRunner()
    make_successful_login(runner)

    dataset_uuid = uuid.uuid4()
    filename = 'jupiter99.fits'
    httpretty.register_uri(
        httpretty.PATCH,
        f'{ARCSECOND_API_URL_DEV}/datasets/{str(dataset_uuid)}/datafiles/(unknown)/',
        status=200,
        body='{"file": "amazon.com..."}')

    # Collect callback invocations in a local list instead of a module-level
    # global: the old `global has_callback_been_called` flag leaked across
    # tests (a previous test setting it could make this assertion pass even
    # if the callback never fired here), and was a NameError if it never ran.
    callback_calls = []

    def upload_callback(eventName, progress):
        print(eventName, progress, flush=True)
        callback_calls.append((eventName, progress))

    fixtures_folder = os.path.join(os.path.dirname(os.path.dirname(__file__)), 'fixtures')

    # Go for Python module tests
    datafiles_api = ArcsecondAPI.datafiles(dataset=str(dataset_uuid), debug=True, test=True)
    payload = {'file': os.path.join(fixtures_folder, 'file1.fits')}
    uploader, _ = datafiles_api.update(filename, payload, callback=upload_callback)
    uploader.start()
    # Wait for the upload thread; sleep briefly to avoid a hot busy-wait.
    while uploader.is_alive():
        time.sleep(0.01)
    print(f'is alive? {uploader.is_alive()}', flush=True)
    results, error = uploader.finish()

    assert results is not None
    assert error is None
    assert len(callback_calls) > 0