Example #1
def test_retry_logging(caplog: pytest.LogCaptureFixture) -> None:
    responses.add(responses.GET, "https://test.nil/api/info/", status=503)
    responses.add(responses.GET, "https://test.nil/api/info/", status=503)
    responses.add(responses.GET,
                  "https://test.nil/api/info/",
                  json={"foo": "bar"})
    client = DandiAPIClient("https://test.nil/api")
    assert client.get("/info/") == {"foo": "bar"}
    responses.assert_call_count("https://test.nil/api/info/", 3)
    assert (
        "dandi",
        logging.DEBUG,
        "GET https://test.nil/api/info/",
    ) in caplog.record_tuples
    assert (
        "dandi",
        logging.WARNING,
        "Retrying GET https://test.nil/api/info/",
    ) in caplog.record_tuples
    assert (
        "dandi",
        logging.INFO,
        "GET https://test.nil/api/info/ succeeded after 2 retries",
    ) in caplog.record_tuples
    assert ("dandi", logging.DEBUG, "Response: 200") in caplog.record_tuples
Example #2
    def test_objectstorage_is_reseted_token_ok(self):
        # Set an expired token
        test_container.identity_config.res = from_dict(
            data_class=service.ConohaIdentityres,
            data={
                'issued_at': '2020-11-14T16:23:00.262864',
                'expires': test_utils.iso_format_datetime_str(delta_days=-1),
                'id': 'old_testtoken'
            })
        responses.add(
            method=responses.POST,
            url=test_container.identity_config.request_url,
            json={
                'access': {
                    'token': {
                        'issued_at': '2020-11-14T16:23:00.262864',
                        'expires':
                        test_utils.iso_format_datetime_str(delta_days=1),
                        'id': 'new_testtoken',
                        'tenant': {},
                        'serviceCatalog': []
                    }
                }
            },
            status=200)
        responses.add(method=responses.PUT,
                      url=test_container.request_url,
                      status=200)

        test_container.create_container()

        self.assertEqual(test_container.identity_config.res.id,
                         'new_testtoken')
        responses.assert_call_count(test_container.identity_config.request_url,
                                    1)
Example #3
    def test_upload_ng(self):
        responses.add(
            method=responses.POST,
            url=test_container.identity_config.request_url,
            json={
                'access': {
                    'token': {
                        'issued_at': '9999-12-31T23:59:59.999999',
                        'expires':
                        test_utils.iso_format_datetime_str(delta_days=1),
                        'id': 'testtoken999999',
                        'tenant': {},
                        'serviceCatalog': []
                    }
                }
            },
            status=200)

        test_data = PostImage(image_extension="jpeg",
                              image_origin_name="test",
                              image_size=523)
        test_url = '{base_url}/{id}.jpeg'.format(
            base_url=test_container.request_url, id=test_data.id)

        responses.add(method=responses.PUT, url=test_url, status=403)
        with self.assertRaises(ConohaUploadException):
            test_data.save(image=test_utils.TEST_IMAGE)

        self.assertEqual(0, PostImage.objects.filter(id=test_data.id).count())

        responses.assert_call_count(test_url, 1)
Example #4
def test_main_ok(arg_list):
    responses.add(responses.POST,
                  'https://api.poeditor.com/v2/projects/export',
                  status=200,
                  json={
                      'response': {
                          'status': 'success',
                          'message': 'OK'
                      },
                      'result': {
                          'url':
                          'https://api.poeditor.com/v2/download/file/'
                          'b577a66ac39d82995debfabc016f855d'
                      }
                  })
    responses.add(
        responses.GET,
        'https://api.poeditor.com/v2/download/file/'
        'b577a66ac39d82995debfabc016f855d')
    with patch('builtins.open', mock_open()):
        assert 0 == poeditorexporter.main(arg_list)

    assert responses.assert_call_count(
        'https://api.poeditor.com/v2/projects/export', 3)
    assert responses.assert_call_count(
        'https://api.poeditor.com/v2/download/file/'
        'b577a66ac39d82995debfabc016f855d', 3)
Example #5
def test_download_validates_token_once(
        mocker,
        faker,
        validate_access_token_url,
        resource_server_granule_url):

    client_id = faker.password(length=22, special_chars=False)
    access_token = faker.password(length=40, special_chars=False)
    cfg = config_fixture(oauth_client_id=client_id)
    url = validate_access_token_url.format(
        token=access_token,
        client_id=client_id
    )

    responses.add(responses.POST, url, status=200)
    responses.add(responses.GET, resource_server_granule_url, status=200)
    responses.add(responses.GET, resource_server_granule_url, status=200)
    destination_file = mocker.Mock()

    response = download(cfg, resource_server_granule_url, access_token, None, destination_file)
    response = download(cfg, resource_server_granule_url, access_token, None, destination_file)

    assert response.status_code == 200
    assert responses.assert_call_count(url, 1) is True
    assert responses.assert_call_count(resource_server_granule_url, 2) is True
Example #6
def test_client_retry_on_fail():
    responses.add(method=responses.POST, url=url, status=503)

    with pytest.raises(ApiException):
        client.detail_search(site_id='someid')

    responses.assert_call_count(url, 4)
Example #7
    def test_run_with_wait(self):
        ht_task = HightouchRunSync()
        sync_id = 123
        responses.add(
            responses.POST,
            f"https://api.hightouch.io/api/v2/rest/run/{sync_id}",
            status=200,
            json={"message": "it works"},
        )
        responses.add(
            responses.GET,
            f"https://api.hightouch.io/api/v2/rest/sync/{sync_id}",
            status=200,
            json={"sync": {"sync_status": "success"}},
        )

        response = ht_task.run(api_key="key", sync_id=123, wait_for_completion=True)

        responses.assert_call_count(
            f"https://api.hightouch.io/api/v2/rest/run/{sync_id}", 1
        )
        responses.assert_call_count(
            f"https://api.hightouch.io/api/v2/rest/sync/{sync_id}", 1
        )

        assert responses.calls[0].request.headers["Authorization"] == "Bearer key"
        assert isinstance(response, dict)
Example #8
    def test_is_set_token_ng(self):
        responses.add(method=responses.POST,
                      url=test_container.identity_config.request_url,
                      status=403)
        with self.assertRaises(ConohaRequestsException):
            test_container.identity_config.set_token()
        responses.assert_call_count(test_container.identity_config.request_url,
                                    1)
Example #9
def test_request_upload_is_not_called_on_init(dataset: RemoteDataset, request_upload_endpoint: str):
    upload_handler = UploadHandler(dataset, [])

    assert upload_handler.pending_count == 0
    assert upload_handler.blocked_count == 0
    assert upload_handler.error_count == 0

    responses.assert_call_count(request_upload_endpoint, 0)
Example #10
    def test_not_secified_delete_object(self):
        responses.add(method=responses.DELETE,
                      url=test_container.request_url,
                      status=200)

        with self.assertRaisesMessage(
                expected_exception=ConohaNotSpecifiedException,
                expected_message='オブジェクト名が指定されていません'):
            test_container.delete_object(delete_object_name=' ')
        responses.assert_call_count(test_container.request_url, 0)
Example #11
def test_error_count_is_correct(dataset: RemoteDataset, request_upload_endpoint: str):
    request_upload_response = {
        "blocked_items": [],
        "items": [{"dataset_item_id": 1, "filename": "test.jpg", "path": "/"}],
    }

    sign_upload_endpoint = "http://localhost/api/dataset_items/1/sign_upload"
    upload_to_s3_endpoint = "https://darwin-data.s3.eu-west-1.amazonaws.com/test.jpg?X-Amz-Signature=abc"

    confirm_upload_endpoint = "http://localhost/api/dataset_items/1/confirm_upload"

    responses.add(responses.PUT, request_upload_endpoint, json=request_upload_response, status=200)
    responses.add(responses.GET, sign_upload_endpoint, status=500)

    local_file = LocalFile(local_path=Path("test.jpg"))
    upload_handler = UploadHandler(dataset, [local_file])

    upload_handler.upload()
    for file_to_upload in upload_handler.progress:
        file_to_upload()

    responses.assert_call_count(request_upload_endpoint, 1)
    responses.assert_call_count(sign_upload_endpoint, 1)
    responses.assert_call_count(upload_to_s3_endpoint, 0)
    responses.assert_call_count(confirm_upload_endpoint, 0)

    assert upload_handler.pending_count == 1
    assert upload_handler.error_count == 1
    assert upload_handler.blocked_count == 0

    error = upload_handler.errors[0]
    assert str(error.file_path) == "test.jpg"
    assert error.stage == UploadStage.REQUEST_SIGNATURE
Example #12
def test_upload_files(dataset: RemoteDataset, request_upload_endpoint: str):
    request_upload_response = {
        "blocked_items": [],
        "items": [{"dataset_item_id": 1, "filename": "test.jpg", "path": "/"}],
    }

    upload_to_s3_endpoint = "https://darwin-data.s3.eu-west-1.amazonaws.com/test.jpg?X-Amz-Signature=abc"
    confirm_upload_endpoint = "http://localhost/api/dataset_items/1/confirm_upload"

    sign_upload_endpoint = "http://localhost/api/dataset_items/1/sign_upload"
    sign_upload_response = {"upload_url": upload_to_s3_endpoint}

    responses.add(responses.PUT, request_upload_endpoint, json=request_upload_response, status=200)
    responses.add(responses.GET, sign_upload_endpoint, json=sign_upload_response, status=200)
    responses.add(responses.PUT, upload_to_s3_endpoint, status=201)
    responses.add(responses.PUT, confirm_upload_endpoint, status=200)

    Path("test.jpg").touch()
    local_file = LocalFile(local_path=Path("test.jpg"))
    upload_handler = UploadHandler(dataset, [local_file])

    upload_handler.upload()
    for file_to_upload in upload_handler.progress:
        file_to_upload()

    responses.assert_call_count(request_upload_endpoint, 1)
    responses.assert_call_count(sign_upload_endpoint, 1)
    responses.assert_call_count(upload_to_s3_endpoint, 1)
    responses.assert_call_count(confirm_upload_endpoint, 1)

    assert upload_handler.error_count == 0
Example #13
    def test_list_workspaces(self, mock_logger, mock_redis):
        user = User.objects.get(username='******')

        url = '{}{}'.format(TEST_TERRA_API_ROOT_URL, GET_WORKSPACE_PATH)
        mock_redis.return_value.get.return_value = None
        responses.add(
            responses.GET,
            url,
            status=200,
            body='[{"public": false, "workspace": {"name": "1000 Genomes Demo", "namespace": "my-seqr-billing" }},'
                 '{"public": true,"workspace": {"name": "degenome","namespace": "degenome"}},'
                 '{"public": false,"workspace": {"name": "seqr-project 1000 Genomes Demo","namespace": "my-seqr-billing"}}]'
        )
        workspaces = list_anvil_workspaces(user)
        self.assertEqual(len(workspaces), 2)
        self.assertEqual(workspaces[1]['workspace']['namespace'],
                         'my-seqr-billing')
        mock_logger.info.assert_called_with(
            'GET https://terra.api/api/workspaces?fields=public,workspace.name,workspace.namespace 200 276',
            user)
        self.assertEqual(len(mock_logger.method_calls), 1)
        responses.assert_call_count(url, 1)

        mock_logger.reset_mock()
        responses.reset()
        mock_redis.return_value.get.return_value = '[{"workspace": {"name": "1000 Genomes Demo", "namespace": "my-seqr-billing" }},' +\
                   '{"workspace": {"name": "seqr-project 1000 Genomes Demo","namespace": "my-seqr-billing"}}]'
        workspaces = list_anvil_workspaces(user)
        self.assertEqual(len(workspaces), 2)
        self.assertEqual(workspaces[1]['workspace']['namespace'],
                         'my-seqr-billing')
        mock_logger.info.assert_called_with(
            'Terra API cache hit for: GET {} {}'.format(
                GET_WORKSPACE_PATH, user), user)
        mock_redis.return_value.get.assert_called_with(
            'terra_req__{}__{}'.format(user, GET_WORKSPACE_PATH))
        responses.assert_call_count(url, 0)  # no call to the Terra API

        mock_logger.reset_mock()
        mock_redis.return_value.get.return_value = None
        responses.add(responses.GET, url, status=401)
        with self.assertRaises(TerraAPIException) as ec:
            _ = list_anvil_workspaces(user)
        self.assertEqual(
            str(ec.exception),
            'Error: called Terra API: GET /api/workspaces?fields=public,workspace.name,workspace.namespace got status: 401 with a reason: Unauthorized'
        )
Example #14
def test_long_retry_not_required(api, caplog):
    '''Some 502 errors do not require a long delay'''
    responses.add(responses.Response(
        method='POST',
        url=api._url,
        status=502,
    ))

    caplog.set_level(logging.DEBUG, logger='cirrus_run')

    time_start = time()
    with pytest.raises(CirrusHTTPError):
        api('fake query text')
    time_end = time()

    assert time_end - time_start > api.RETRY_DELAY * 3
    assert time_end - time_start < api.RETRY_LONG_DELAY + api.RETRY_DELAY * 2

    long_delay_log_message_count = 0
    for record in caplog.records:
        if 'API server asked for longer retry delay' in record.message:
            long_delay_log_message_count += 1
    assert long_delay_log_message_count == 0

    assert responses.assert_call_count(api._url, 1 + 3), \
           'Incorrect number of _post calls before raising CirrusHTTPError'
Example #15
def test_long_retry_delay_required(api, caplog):
    '''Wait out intermittent API server errors'''
    responses.add(
        responses.Response(
            method='POST',
            url=api._url,
            status=502,
            body='The server encountered a temporary error and could not complete your request. Please try again in 30 seconds.',
        ))

    caplog.set_level(logging.DEBUG, logger='cirrus_run')

    time_start = time()
    with pytest.raises(CirrusHTTPError):
        api('fake query text')
    time_end = time()

    assert time_end - time_start > api.RETRY_LONG_DELAY + api.RETRY_DELAY * 2
    assert time_end - time_start < api.RETRY_LONG_DELAY * 3

    long_delay_log_message_count = 0
    for record in caplog.records:
        if 'API server asked for longer retry delay' in record.message:
            long_delay_log_message_count += 1
    assert long_delay_log_message_count == 1

    assert responses.assert_call_count(api._url, 1 + 3), \
           'Incorrect number of _post calls before raising CirrusHTTPError'
Example #16
def test_long_retry_internal_server_error_unrecoverable(api, caplog):
    '''Retry GraphQL internal server error - unrecoverable'''
    caplog.set_level(logging.DEBUG, logger='cirrus_run')
    for params in [
        {
            'status': 200,
            'json': {
                'errors': [{
                    'locations': [],
                    'message':
                    'Internal Server Error(s) while executing query'
                }]
            }
        },
    ]:
        responses.add(responses.Response(method='POST', url=api._url,
                                         **params))

    time_start = time()
    with pytest.raises(CirrusAPIError):
        api('fake query text', delay=0)
    time_end = time()

    assert responses.assert_call_count(api._url, 1 + 3)

    assert time_end - time_start > api.RETRY_DELAY * 3
    assert time_end - time_start < api.RETRY_LONG_DELAY * 2 + api.RETRY_DELAY

    long_delay_log_message_count = 0
    for record in caplog.records:
        if 'API server asked for longer retry delay' in record.message:
            long_delay_log_message_count += 1
    assert long_delay_log_message_count == 1
Example #17
def test_dagster_telemetry_upload(env):
    logger = logging.getLogger("dagster_telemetry_logger")
    for handler in logger.handlers:
        logger.removeHandler(handler)

    with environ(env):
        with instance_for_test(enable_telemetry=True):
            runner = CliRunner()
            with pushd(path_to_file("")):
                pipeline_attribute = "foo_pipeline"
                runner.invoke(
                    pipeline_execute_command,
                    [
                        "-f",
                        path_to_file("test_cli_commands.py"), "-a",
                        pipeline_attribute
                    ],
                )

            mock_stop_event = mock.MagicMock()
            mock_stop_event.is_set.return_value = False

            def side_effect():
                mock_stop_event.is_set.return_value = True

            mock_stop_event.wait.side_effect = side_effect

            upload_logs(mock_stop_event)
            assert responses.assert_call_count(DAGSTER_TELEMETRY_URL, 1)
Example #18
    def test_invalid_url(self) -> None:
        url = "http://test.org/"
        error_url = "http://test.org/x"
        with mock_queue_publish("zerver.lib.actions.queue_json_publish"):
            msg_id = self.send_personal_message(
                self.example_user("hamlet"),
                self.example_user("cordelia"),
                content=error_url,
            )
        msg = Message.objects.select_related("sender").get(id=msg_id)
        event = {
            "message_id": msg_id,
            "urls": [error_url],
            "message_realm_id": msg.sender.realm_id,
            "message_content": error_url,
        }

        self.create_mock_response(error_url, status=404)
        with self.settings(TEST_SUITE=False, CACHES=TEST_CACHES):
            with self.assertLogs(level="INFO") as info_logs:
                FetchLinksEmbedData().consume(event)
            self.assertTrue(
                "INFO:root:Time spent on get_link_embed_data for http://test.org/x: "
                in info_logs.output[0])
            cached_data = link_embed_data_from_cache(error_url)

        # FIXME: Should we really cache this, especially without cache invalidation?
        self.assertIsNone(cached_data)
        msg.refresh_from_db()
        self.assertEqual(
            '<p><a href="http://test.org/x">http://test.org/x</a></p>',
            msg.rendered_content)
        self.assertTrue(responses.assert_call_count(url, 0))
Example #19
        def wrapped_queue_json_publish(*args: Any, **kwargs: Any) -> None:
            self.create_mock_response(original_url)
            self.create_mock_response(edited_url)

            with self.settings(TEST_SUITE=False, CACHES=TEST_CACHES):
                with self.assertLogs(level="INFO") as info_logs:
                    # Run the queue processor. This will simulate the event for original_url being
                    # processed after the message has been edited.
                    FetchLinksEmbedData().consume(event)
            self.assertTrue(
                "INFO:root:Time spent on get_link_embed_data for http://test.org/: "
                in info_logs.output[0])
            msg = Message.objects.select_related("sender").get(id=msg_id)
            # The content of the message has changed since the event for original_url has been created,
            # it should not be rendered. Another, up-to-date event will have been sent (edited_url).
            self.assertNotIn(
                f'<a href="{original_url}" title="The Rock">The Rock</a>',
                msg.rendered_content)

            self.assertTrue(responses.assert_call_count(edited_url, 0))

            with self.settings(TEST_SUITE=False, CACHES=TEST_CACHES):
                with self.assertLogs(level="INFO") as info_logs:
                    # Now proceed with the original queue_json_publish and call the
                    # up-to-date event for edited_url.
                    queue_json_publish(*args, **kwargs)
                    msg = Message.objects.select_related("sender").get(
                        id=msg_id)
                    self.assertIn(
                        f'<a href="{edited_url}" title="The Rock">The Rock</a>',
                        msg.rendered_content,
                    )
            self.assertTrue(
                "INFO:root:Time spent on get_link_embed_data for http://edited.org/: "
                in info_logs.output[0])
Example #20
    def test_refresh_successful(self, authentication_service, endpoint):
        responses.add(responses.POST,
                      endpoint,
                      json={
                          'access_token': ACCESS_TOKEN,
                          'refresh_token': REFRESH_TOKEN,
                          'scope': SCOPE,
                          'token_type': TOKEN_TYPE,
                          'expires_in': EXPIRES_IN
                      },
                      status=200)

        # act
        authentication_service.authenticate()  # authenticate first
        auth_data = authentication_service.refresh()  # refresh

        # assert
        assert type(auth_data) == dict
        assert authentication_service._access_token == ACCESS_TOKEN
        assert authentication_service._refresh_token == REFRESH_TOKEN
        assert authentication_service._scope == SCOPE
        assert authentication_service._token_type == TOKEN_TYPE
        assert authentication_service._expires_at is not None
        assert responses.calls[0].request.body == (
            f'grant_type=client_credentials&client_id={CLIENT_ID}&client_secret={CLIENT_SECRET}')
        assert responses.calls[1].request.body == (
            f'grant_type=refresh_token&refresh_token={REFRESH_TOKEN}')
        assert responses.assert_call_count(endpoint, 2) is True
Example #21
def test_watch(get_mock_job):
    incomplete_job = get_mock_job()
    complete_job = Job.from_dict(incomplete_job.to_dict())
    complete_job.status_code = 'SUCCEEDED'
    api = HyP3()
    for ii in range(3):
        responses.add(responses.GET,
                      urljoin(api.url, f'/jobs/{incomplete_job.job_id}'),
                      json=incomplete_job.to_dict())
    responses.add(responses.GET,
                  urljoin(api.url, f'/jobs/{incomplete_job.job_id}'),
                  json=complete_job.to_dict())
    response = api.watch(incomplete_job, interval=0.05)
    assert response == complete_job
    responses.assert_call_count(
        urljoin(api.url, f'/jobs/{incomplete_job.job_id}'), 4)
Example #22
def test_fb_message(mocker, monkeypatch, user_id, text, token):
    monkeypatch.setattr(utils, "FB_PAGE_TOKEN", value=token)
    url = f"{utils.FB_GRAPH_API}access_token={token}"
    responses.add(responses.POST, url, status=200, json="mocked")
    utils.fb_message(user_id, text)
    assert responses.assert_call_count(url, 1)
    monkeypatch.undo()
Example #23
def test_recoverable_api_error(api):
    '''Check handling of intermittent API errors'''
    for params in [
        {
            'status': 200,
            'json': {
                'errors': ['fake error message']
            },
        },
        {
            'status': 200,
            'json': {
                'errors': ['fake error message']
            },
        },
        {
            'status': 200,
            'json': {
                'data': {
                    'hello': 'world'
                }
            }
        },
    ]:
        responses.add(responses.Response(method='POST', url=api._url,
                                         **params))
    reply = api('fake query text', delay=0)
    assert reply == {'hello': 'world'}
    assert responses.assert_call_count(api._url, 3)
Example #24
def test_dagster_telemetry_upload(env):
    logger = logging.getLogger("dagster_telemetry_logger")
    for handler in logger.handlers:
        logger.removeHandler(handler)

    responses.add(responses.POST, DAGSTER_TELEMETRY_URL)

    with instance_for_test(overrides={"telemetry": {"enabled": True}}):
        with environ(env):
            runner = CliRunner()
            with pushd(path_to_file("")):
                pipeline_attribute = "foo_pipeline"
                runner.invoke(
                    pipeline_execute_command,
                    [
                        "-f",
                        path_to_file("test_cli_commands.py"), "-a",
                        pipeline_attribute
                    ],
                )

            mock_stop_event = mock.MagicMock()
            mock_stop_event.is_set.return_value = False

            def side_effect(_):
                mock_stop_event.is_set.return_value = True

            mock_stop_event.wait.side_effect = side_effect

            # Needed to avoid file contention issues on windows with the telemetry log file
            cleanup_telemetry_logger()

            upload_logs(mock_stop_event, raise_errors=True)
            assert responses.assert_call_count(DAGSTER_TELEMETRY_URL, 1)
Example #25
    def test_run_with_continue_waiting(self, caplog):
        api_url = "https://test.cubecloud.dev/cubejs-api/v1/load"
        cubejs_task = CubeJSQueryTask()

        responses.add(
            responses.GET,
            api_url,
            status=200,
            json={"error": "Continue wait"},
        )

        responses.add(
            responses.GET,
            api_url,
            status=200,
            json={"data": "result"},
        )

        data = cubejs_task.run(subdomain="test",
                               api_secret="foo",
                               query={"measures": "count"})

        expected_url = api_url + "?query=" + quote_plus(
            '{"measures": "count"}')

        assert responses.assert_call_count(expected_url, 2) is True
        assert isinstance(data, dict)
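
As the example above shows, assert_call_count matches against the full request URL, so when the client appends a query string the expected URL must include it. A minimal sketch of the same pattern, with placeholder names:

import requests
import responses


@responses.activate
def test_call_count_includes_query_string_sketch():
    # A mock registered without a query string still matches a request that carries one.
    responses.add(responses.GET, "https://example.invalid/load", json={"data": "result"})
    requests.get("https://example.invalid/load", params={"query": "count"})

    # The call is recorded under the full URL, query string included.
    assert responses.assert_call_count("https://example.invalid/load?query=count", 1) is True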
Example #26
def test_retrieve_token_with_password(rok_connector, rok_ds):
    """check that we correctly retrieve the rok token using a passord"""

    # This is the data returned by ROK, a token encrypted with the shared secret
    auth_query = """
        query Auth($database: String!, $user: String!, $password: String!)
        {authenticate(database: $database, user: $user, password: $password)}"""
    auth_vars = {
        'database': rok_ds.database,
        'user': rok_connector.username,
        'password': rok_connector.password,
    }
    # Mock the response we expect from the ROK password authentication API
    responses.add(
        method=responses.POST,
        url='http://bla.bla',
        json={'data': {
            'authenticate': 'rok_token'
        }},
    )
    rok_connector.retrieve_token_with_password(rok_ds.database,
                                               endpoint='http://bla.bla')

    assert responses.assert_call_count('http://bla.bla', 1) is True
    assert JsonWrapper.loads(responses.calls[0].request.body) == {
        'query': auth_query,
        'variables': auth_vars,
    }
Example #27
    def test_get_instances_by_status_successful(self, instances_service,
                                                endpoint):
        # arrange - add response mock
        url = endpoint + "?status=running"
        responses.add(responses.GET, url, json=PAYLOAD, status=200)

        # act
        instances = instances_service.get(status='running')

        # assert
        assert type(instances) == list
        assert len(instances) == 1
        assert type(instances[0]) == Instance
        assert type(instances[0].ssh_key_ids) == list
        assert instances[0].id == INSTANCE_ID
        assert instances[0].ssh_key_ids == [SSH_KEY_ID]
        assert instances[0].status == INSTANCE_STATUS
        assert instances[0].image == INSTANCE_IMAGE
        assert instances[0].instance_type == INSTANCE_TYPE
        assert instances[0].price_per_hour == INSTANCE_PRICE_PER_HOUR
        assert instances[0].location == INSTANCE_LOCATION
        assert instances[0].description == INSTANCE_DESCRIPTION
        assert instances[0].hostname == INSTANCE_HOSTNAME
        assert instances[0].ip == INSTANCE_IP
        assert instances[0].created_at == INSTANCE_CREATED_AT
        assert type(instances[0].cpu) == dict
        assert type(instances[0].gpu) == dict
        assert type(instances[0].memory) == dict
        assert type(instances[0].storage) == dict
        assert responses.assert_call_count(url, 1) is True
Example #28
    def it_works(darwin_client: Client, dataset_name: str, dataset_slug: str,
                 team_slug: str, files_content: dict):
        remote_dataset = RemoteDataset(client=darwin_client,
                                       team=team_slug,
                                       name=dataset_name,
                                       slug=dataset_slug,
                                       dataset_id=1)
        url = "http://localhost/api/datasets/1/items?page%5Bsize%5D=500"
        responses.add(
            responses.POST,
            url,
            json=files_content,
            status=200,
        )

        actual = remote_dataset.fetch_remote_files()

        assert isinstance(actual, types.GeneratorType)

        (item_1, item_2) = list(actual)

        assert responses.assert_call_count(url, 1) is True

        assert item_1.id == 386074
        assert item_2.id == 386073
Example #29
    def test_add_service_account(self):
        user = User.objects.get(username='******')

        url = '{}api/workspaces/my-seqr-billing/my-seqr-workspace/acl'.format(
            TEST_TERRA_API_ROOT_URL)
        responses.add(
            responses.GET,
            url,
            status=200,
            body='{{"acl": {{"{}": {{"accessLevel": "READER","canCompute": false,"canShare": false,"pending": false}} }} }}'
                 .format(TEST_SERVICE_ACCOUNT))
        r = add_service_account(user, 'my-seqr-billing', 'my-seqr-workspace')
        self.assertFalse(r)
        self.assertEqual(responses.calls[0].request.url, url)
        responses.assert_call_count(url, 1)

        responses.reset()
        responses.add(responses.GET, url, status=200, body='{"acl": {}}')
        responses.add(responses.PATCH,
                      url,
                      status=200,
                      body='{{"usersUpdated": [{{"email": "{}" }}]}}'.format(
                          TEST_SERVICE_ACCOUNT))
        r = add_service_account(user, 'my-seqr-billing', 'my-seqr-workspace')
        self.assertTrue(r)
        responses.assert_call_count(url, 2)
        self.assertEqual(responses.calls[0].request.method, responses.GET)
        self.assertEqual(responses.calls[1].request.method, responses.PATCH)
        self.assertEqual(
            responses.calls[1].request.body,
            '[{"email": "*****@*****.**", "accessLevel": "READER", "canShare": false, "canCompute": false}]'
        )

        responses.replace(responses.PATCH,
                          url,
                          status=200,
                          body='{"usersUpdated": []}')
        with self.assertRaises(TerraAPIException) as te:
            _ = add_service_account(user, 'my-seqr-billing',
                                    'my-seqr-workspace')
        self.assertEqual(
            str(te.exception),
            'Failed to grant seqr service account access to the workspace my-seqr-billing/my-seqr-workspace'
        )
Example #30
    def test_write_package_with_force(self):
        self.package.to_ckan(
            base_url=self.base_url,
            dataset_id=self.dataset_id,
            api_key="env:CKAN_API_KEY",
            force=True,
        )
        assert responses.assert_call_count(
            f"{self.base_url}/api/3/action/datastore_upsert", 1)