async def test_set_status_success_via_skip_issue_label(action, monkeypatch):
    monkeypatch.setattr(
        bpo, '_validate_issue_number', mock.AsyncMock(return_value=True)
    )
    data = {
        "action": action,
        "pull_request": {
            "statuses_url": "https://api.github.com/blah/blah/git-sha",
            "title": "No issue in title",
            "issue_url": "issue URL",
        },
    }
    issue_data = {
        "labels": [
            {"name": "skip issue"},
        ]
    }
    event = sansio.Event(data, event="pull_request", delivery_id="12345")
    gh = FakeGH(getitem=issue_data)
    await bpo.router.dispatch(event, gh, session=None)
    status = gh.post_data[0]
    assert status["state"] == "success"
    assert status["context"] == "bedevere/issue-number"
    assert "git-sha" in gh.post_url[0]
    bpo._validate_issue_number.assert_not_awaited()
async def test_shell(self, rs_patch):
    """Unit tests for !shell command."""
    # async def shell(self, ctx)
    shell = self.cog.shell
    rs_patch().interact = mock.AsyncMock()
    rs_patch.reset_mock()

    # Okay
    ctx = mock_discord.get_ctx(shell)
    await ctx.invoke()
    rs_patch.assert_called_once_with(ctx.channel, mock.ANY)
    self.assertEqual(rs_patch.call_args.args[1]["ctx"], ctx)
    ctx.assert_sent()
    rs_patch.reset_mock()

    # Exit
    rs_patch().interact.side_effect = blocs.realshell.RealShellExit
    rs_patch.reset_mock()
    ctx = mock_discord.get_ctx(shell)
    with self.assertRaises(blocs.tools.CommandExit):
        await ctx.invoke()
    rs_patch.assert_called_once_with(ctx.channel, mock.ANY)
    self.assertEqual(rs_patch.call_args.args[1]["ctx"], ctx)
    ctx.assert_sent()
import sys
from unittest import mock


def AsyncMock(*args, **kwargs):
    # unittest.mock gained AsyncMock in Python 3.8; on older interpreters,
    # fall back to the external "mock" backport package.
    if sys.version_info >= (3, 8):
        return mock.AsyncMock(*args, **kwargs)
    else:
        return __import__("mock").AsyncMock(*args, **kwargs)
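# Usage sketch for the shim above (a minimal example, assuming the "mock"
# backport package is installed on interpreters older than 3.8 — the only
# case in which the else branch runs; fetch() is hypothetical):
async def fetch(client):
    return await client.get("/ping")


async def test_fetch_uses_client():
    client = mock.Mock()
    client.get = AsyncMock(return_value="pong")  # shim picks the right AsyncMock
    assert await fetch(client) == "pong"
    client.get.assert_awaited_once_with("/ping")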
async def test_end_to_end_send_only_timeseries_data():
    response = mock.Mock()
    response.status_code = 200
    post_mock = mock.AsyncMock(return_value=response)

    with mock.patch(
        "hetdesrun.adapters.generic_rest.send_framelike.get_generic_rest_adapter_base_url",
        return_value="https://hetida.de",
    ):
        with mock.patch(
            "hetdesrun.adapters.generic_rest.send_ts_data.AsyncClient.post",
            new=post_mock,
        ):
            ts_1 = pd.Series(
                [1.2, 3.4, 5.9],
                index=pd.to_datetime(
                    [
                        "2020-01-15T00:00:00.000Z",
                        "2020-01-15T01:00:00.000Z",
                        "2020-01-15T02:00:00.000Z",
                    ]
                ),
            )

            # one timeseries
            await send_data(
                {
                    "inp_1": FilteredSink(
                        ref_id="sink_id_1", type="timeseries(float)", filters={}
                    )
                },
                {"inp_1": ts_1},
                adapter_key="test_end_to_end_send_only_timeseries_data_adapter_key",
            )
            assert post_mock.called  # we got through to actually posting!

            func_name, args, kwargs = post_mock.mock_calls[0]
            assert kwargs["params"] == [("timeseriesId", "sink_id_1")]
            assert kwargs["json"] == [
                {"timestamp": "2020-01-15T00:00:00.000000000Z", "value": 1.2},
                {"timestamp": "2020-01-15T01:00:00.000000000Z", "value": 3.4},
                {"timestamp": "2020-01-15T02:00:00.000000000Z", "value": 5.9},
            ]

            # more than one timeseries
            ts_2 = pd.Series(
                ["first", "second"],
                index=pd.to_datetime(
                    [
                        "2020-01-15T00:00:00.000Z",
                        "2020-01-15T01:00:00.000Z",
                    ]
                ),
            )
            await send_data(
                {
                    "inp_1": FilteredSink(
                        ref_id="sink_id_1", type="timeseries(float)", filters={}
                    ),
                    "inp_2": FilteredSink(
                        ref_id="sink_id_2",
                        type=ExternalType.TIMESERIES_STR,
                        filters={},
                    ),
                },
                {
                    "inp_1": ts_1,
                    "inp_2": ts_2,
                },
                adapter_key="test_end_to_end_send_only_timeseries_data_adapter_key",
            )
            # note: can be async!
            func_name_1, args_1, kwargs_1 = post_mock.mock_calls[1]
            func_name_2, args_2, kwargs_2 = post_mock.mock_calls[2]
            assert (len(kwargs_1["json"]) == 3) or (len(kwargs_2["json"]) == 3)
            assert (len(kwargs_1["json"]) == 2) or (len(kwargs_2["json"]) == 2)

            # a timeseries with attributes
            ts = pd.Series(
                [1.2, 3.4, np.nan],
                index=pd.to_datetime(
                    [
                        "2020-01-15T00:00:00.000Z",
                        "2020-01-15T01:00:00.000Z",
                        "2020-01-15T02:00:00.000Z",
                    ]
                ),
            )
            ts_1_attrs = {"a": 1}
            ts_1.attrs = ts_1_attrs

            await send_data(
                {
                    "inp_1": FilteredSink(
                        ref_id="sink_id_1", type="timeseries(float)", filters={}
                    ),
                },
                {"inp_1": ts_1},
                adapter_key="test_end_to_end_send_only_timeseries_data_adapter_key",
            )
            # note: can be async!
            func_name_3, args_3, kwargs_3 = post_mock.mock_calls[3]
            assert "Data-Attributes" in kwargs_3["headers"]
            received_attrs = decode_attributes(kwargs_3["headers"]["Data-Attributes"])
            for key, value in ts_1_attrs.items():
                assert key in received_attrs
                assert received_attrs[key] == value
async def test_worker_expanded_events(
    extract_pull_numbers_from_event,
    aget_client,
    run_engine,
    _,
    redis_stream,
    redis_cache,
    logger_checker,
):
    client = mock.Mock(
        name="foo",
        owner="owner",
        repo="repo",
        auth=mock.Mock(
            installation={"id": 12345}, owner="owner", repo="repo", owner_id=123
        ),
    )
    client.__aenter__ = mock.AsyncMock(return_value=client)
    client.__aexit__ = mock.AsyncMock()
    client.items.return_value = mock.AsyncMock()
    aget_client.return_value = client

    extract_pull_numbers_from_event.return_value = [123, 456, 789]
    await worker.push(
        redis_stream,
        123,
        "owner",
        "repo",
        123,
        "pull_request",
        {"payload": "whatever"},
    )
    await worker.push(
        redis_stream,
        123,
        "owner",
        "repo",
        None,
        "comment",
        {"payload": "foobar"},
    )

    assert 1 == (await redis_stream.zcard("streams"))
    assert 1 == len(await redis_stream.keys("stream~*"))
    assert 2 == (await redis_stream.xlen("stream~owner~123"))

    await run_worker()

    # Check redis is empty
    assert 0 == (await redis_stream.zcard("streams"))
    assert 0 == len(await redis_stream.keys("stream~*"))
    assert 0 == len(await redis_stream.hgetall("attempts"))

    # Check the engine was run with the expected data
    assert 3 == len(run_engine.mock_calls)
    assert run_engine.mock_calls[0] == mock.call(
        InstallationMatcher(owner="owner"),
        "repo",
        123,
        [
            {
                "event_type": "pull_request",
                "data": {"payload": "whatever"},
                "timestamp": mock.ANY,
            },
            {
                "event_type": "comment",
                "data": {"payload": "foobar"},
                "timestamp": mock.ANY,
            },
        ],
    )
    assert run_engine.mock_calls[1] == mock.call(
        InstallationMatcher(owner="owner"),
        "repo",
        456,
        [
            {
                "event_type": "comment",
                "data": {"payload": "foobar"},
                "timestamp": mock.ANY,
            },
        ],
    )
    assert run_engine.mock_calls[2] == mock.call(
        InstallationMatcher(owner="owner"),
        "repo",
        789,
        [
            {
                "event_type": "comment",
                "data": {"payload": "foobar"},
                "timestamp": mock.ANY,
            },
        ],
    )
async def test_end_to_end_send_only_metadata_data():
    response = mock.Mock()
    response.status_code = 200
    post_mock = mock.AsyncMock(return_value=response)

    with mock.patch(
        "hetdesrun.adapters.generic_rest.send_metadata.get_generic_rest_adapter_base_url",
        return_value="https://hetida.de",
    ):
        with mock.patch(
            "hetdesrun.adapters.generic_rest.send_metadata.httpx.AsyncClient.post",
            new=post_mock,
        ):
            # more than one
            await send_data(
                {
                    "outp_1": FilteredSink(
                        ref_id="th_node_id",
                        type="metadata(string)",
                        ref_id_type="THINGNODE",
                        ref_key="description",
                        filters={},
                    ),
                    "outp_2": FilteredSink(
                        ref_id="sink_id",
                        type="metadata(float)",
                        ref_id_type="SINK",
                        ref_key="upper_lim",
                        filters={},
                    ),
                },
                {"outp_1": "some description", "outp_2": 47.8},
                adapter_key="test_end_to_end_send_only_metadata_data_adapter_key",
            )

            func_name_1, args_1, kwargs_1 = post_mock.mock_calls[0]
            func_name_2, args_2, kwargs_2 = post_mock.mock_calls[1]
            assert {
                "key": "description",
                "value": "some description",
                "dataType": "string",
            } in [kwargs_1["json"], kwargs_2["json"]]
            assert {
                "key": "upper_lim",
                "value": 47.8,
                "dataType": "float",
            } in [kwargs_1["json"], kwargs_2["json"]]
            assert "https://hetida.de/thingNodes/th_node_id/metadata/description" in [
                args_1[0],
                args_2[0],
            ]
            assert "https://hetida.de/sinks/sink_id/metadata/upper_lim" in [
                args_1[0],
                args_2[0],
            ]
async def test_wiring_with_generic_rest_input(
    input_json_with_wiring_with_input, async_test_client
):
    async with async_test_client as client:
        json_with_wiring = deepcopy(input_json_with_wiring_with_input)
        json_with_wiring["workflow_wiring"]["input_wirings"] = [
            {
                "workflow_input_name": "val_inp",
                "adapter_id": "gen_rest_adapter_test_id",
                "ref_id": "thing_node_id",
                "ref_id_type": "THINGNODE",
                "ref_key": "number",
                "type": "metadata(int)",
            }
        ]
        ww = WorkflowWiring(**json_with_wiring["workflow_wiring"])

        resp_mock = mock.Mock()
        resp_mock.status_code = 200
        resp_mock.json = mock.Mock(
            return_value={"key": "number", "value": 32, "dataType": "int"}
        )
        with mock.patch(
            "hetdesrun.adapters.generic_rest.load_metadata.get_generic_rest_adapter_base_url",
            return_value="https://hetida.de",
        ):
            with mock.patch(
                "hetdesrun.adapters.generic_rest.load_metadata.httpx.AsyncClient.get",
                return_value=resp_mock,
            ) as mocked_async_client_get:
                status_code, output = await run_workflow_with_client(
                    json_with_wiring, client
                )

                assert status_code == 200

                node_results = output["node_results"]
                assert "32.0" in node_results  # intermediate result
                assert "64.0" in node_results

                # now add sending metadata from the only output
                json_with_wiring["workflow_wiring"]["output_wirings"] = [
                    {
                        "workflow_output_name": "z",
                        "adapter_id": "gen_rest_adapter_test_id",
                        "ref_id": "thing_node_id",
                        "ref_id_type": "THINGNODE",
                        "ref_key": "limit",
                        "type": "metadata(float)",
                    }
                ]
                with mock.patch(
                    "hetdesrun.adapters.generic_rest.send_metadata.get_generic_rest_adapter_base_url",
                    return_value="https://hetida.de",
                ):
                    response = mock.Mock()
                    response.status_code = 200
                    send_metadata_post_mock = mock.AsyncMock(return_value=response)
                    with mock.patch(
                        "hetdesrun.adapters.generic_rest.send_metadata.post_json_with_open_client",
                        new=send_metadata_post_mock,
                    ):
                        status_code, output = await run_workflow_with_client(
                            json_with_wiring, client
                        )

                        # what gets into the post request sent from send_metadata:
                        func_name, args, kwargs = send_metadata_post_mock.mock_calls[0]
                        assert kwargs["json_payload"] == {
                            "key": "limit",
                            "value": 64.0,
                            "dataType": "float",
                        }
                        assert (
                            kwargs["url"]
                            == "https://hetida.de/thingNodes/thing_node_id/metadata/limit"
                        )
async def test_cog_check(self, role_check):
    """Role check is called with `MODERATION_ROLES`"""
    role_check.return_value.predicate = mock.AsyncMock()
    await self.cog.cog_check(self.ctx)

    role_check.assert_called_once_with(*(1, 2, 3))
    role_check.return_value.predicate.assert_awaited_once_with(self.ctx)
async def test_get_mergify_config_location_from_cache(
    redis_cache: utils.RedisCache,
) -> None:
    client = mock.AsyncMock()
    client.auth.owner = "foo"
    client.item.side_effect = [
        http.HTTPNotFound("Not Found", request=mock.Mock(), response=mock.Mock()),
        http.HTTPNotFound("Not Found", request=mock.Mock(), response=mock.Mock()),
        github_types.GitHubContentFile(
            {
                "content": encodebytes("whatever".encode()).decode(),
                "type": "file",
                "path": ".github/mergify.yml",
                "sha": github_types.SHAType("zeazeaze"),
            }
        ),
    ]

    installation = context.Installation(
        github_types.GitHubAccountIdType(0),
        github_types.GitHubLogin("foo"),
        subscription.Subscription(redis_cache, 0, False, "", frozenset()),
        client,
        redis_cache,
    )
    repository = context.Repository(
        installation,
        github_types.GitHubRepositoryName("bar"),
        github_types.GitHubRepositoryIdType(0),
    )
    await repository.get_mergify_config_file()
    assert client.item.call_count == 3
    client.item.assert_has_calls(
        [
            mock.call("/repos/foo/bar/contents/.mergify.yml", params={}),
            mock.call("/repos/foo/bar/contents/.mergify/config.yml", params={}),
            mock.call("/repos/foo/bar/contents/.github/mergify.yml", params={}),
        ]
    )

    client.item.reset_mock()
    client.item.side_effect = [
        github_types.GitHubContentFile(
            {
                "content": encodebytes("whatever".encode()).decode(),
                "type": "file",
                "path": ".github/mergify.yml",
                "sha": github_types.SHAType("zeazeaze"),
            }
        ),
    ]
    repository._cache = context.RepositoryCache()
    await repository.get_mergify_config_file()
    assert client.item.call_count == 1
    client.item.assert_has_calls(
        [
            mock.call("/repos/foo/bar/contents/.github/mergify.yml", params={}),
        ]
    )
async def test_get_mergify_config_location_from_cache(
    redis_cache: utils.RedisCache,
) -> None:
    client = mock.AsyncMock()
    client.auth.owner = "foo"
    client.item.side_effect = [
        http.HTTPNotFound("Not Found", request=mock.Mock(), response=mock.Mock()),
        http.HTTPNotFound("Not Found", request=mock.Mock(), response=mock.Mock()),
        github_types.GitHubContentFile(
            {
                "content": encodebytes("whatever".encode()).decode(),
                "type": "file",
                "path": ".github/mergify.yml",
                "sha": github_types.SHAType("zeazeaze"),
            }
        ),
    ]

    gh_owner = github_types.GitHubAccount(
        {
            "login": github_types.GitHubLogin("foobar"),
            "id": github_types.GitHubAccountIdType(0),
            "type": "User",
            "avatar_url": "",
        }
    )
    gh_repo = github_types.GitHubRepository(
        {
            "full_name": "foobar/xyz",
            "name": github_types.GitHubRepositoryName("xyz"),
            "private": False,
            "id": github_types.GitHubRepositoryIdType(0),
            "owner": gh_owner,
            "archived": False,
            "url": "",
            "html_url": "",
            "default_branch": github_types.GitHubRefType("ref"),
        }
    )

    installation = context.Installation(
        github_types.GitHubAccountIdType(0),
        github_types.GitHubLogin("foobar"),
        subscription.Subscription(redis_cache, 0, False, "", frozenset()),
        client,
        redis_cache,
    )
    repository = context.Repository(installation, gh_repo)
    await repository.get_mergify_config_file()
    assert client.item.call_count == 3
    client.item.assert_has_calls(
        [
            mock.call("/repos/foobar/xyz/contents/.mergify.yml", params={}),
            mock.call("/repos/foobar/xyz/contents/.mergify/config.yml", params={}),
            mock.call("/repos/foobar/xyz/contents/.github/mergify.yml", params={}),
        ]
    )

    client.item.reset_mock()
    client.item.side_effect = [
        github_types.GitHubContentFile(
            {
                "content": encodebytes("whatever".encode()).decode(),
                "type": "file",
                "path": ".github/mergify.yml",
                "sha": github_types.SHAType("zeazeaze"),
            }
        ),
    ]
    repository._cache = context.RepositoryCache()
    await repository.get_mergify_config_file()
    assert client.item.call_count == 1
    client.item.assert_has_calls(
        [
            mock.call("/repos/foobar/xyz/contents/.github/mergify.yml", params={}),
        ]
    )
    None,
    None,
)
app.include_router(router)
test_client = TestClient(app)

ACCESS_TOKEN = "ACCESS"
REFRESH_TOKEN = "REFRESH"


@mock.patch(
    "fastapi_auth.routers.auth.AuthService.register",
    mock.AsyncMock(return_value={"access": ACCESS_TOKEN, "refresh": REFRESH_TOKEN}),
)
def test_register():
    url = app.url_path_for("auth:register")
    data = {
        "email": "*****@*****.**",
        "username": "******",
        "password1": "12345678",
        "password2": "12345678",
    }
    response = test_client.post(
        url,
        json=data,
    )
def f_mock_transport():
    return mock.AsyncMock(spec=asyncio.Transport)
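# A minimal usage sketch for the fixture above (EchoProtocol is hypothetical,
# not part of the snippet). Because of spec=asyncio.Transport, accessing an
# attribute that real transports lack raises AttributeError, so typos in the
# code under test fail loudly instead of silently passing:
def test_connection_made_writes_banner():
    transport = f_mock_transport()
    protocol = EchoProtocol()  # hypothetical protocol under test
    protocol.connection_made(transport)
    transport.write.assert_called_once_with(b"READY\r\n")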
async def test_get_pull_request_rule(redis_cache: utils.RedisCache) -> None:
    client = mock.Mock()

    get_reviews = [
        {
            "user": {"login": "******", "id": 12321, "type": "User"},
            "state": "APPROVED",
            "author_association": "MEMBER",
        }
    ]
    get_files = [{"filename": "README.rst"}, {"filename": "setup.py"}]
    get_team_members = [
        {"login": "******", "id": 12321},
        {"login": "******", "id": 2644},
    ]
    get_checks: typing.List[github_types.GitHubCheckRun] = []
    get_statuses: typing.List[github_types.GitHubStatus] = [
        {
            "context": "continuous-integration/fake-ci",
            "state": "success",
            "description": "foobar",
            "target_url": "http://example.com",
            "avatar_url": "",
        }
    ]
    client.item = mock.AsyncMock(return_value={"permission": "write"})

    async def client_items(url, *args, **kwargs):
        if url == "/repos/another-jd/name/pulls/1/reviews":
            for r in get_reviews:
                yield r
        elif url == "/repos/another-jd/name/pulls/1/files":
            for f in get_files:
                yield f
        elif url == "/repos/another-jd/name/commits/<sha>/check-runs":
            for c in get_checks:
                yield c
        elif url == "/repos/another-jd/name/commits/<sha>/status":
            for s in get_statuses:
                yield s
        elif url == "/orgs/another-jd/teams/my-reviewers/members":
            for tm in get_team_members:
                yield tm
        else:
            raise RuntimeError(f"not handled url {url}")

    client.items.side_effect = client_items

    installation = context.Installation(
        github_types.GitHubAccountIdType(2644),
        github_types.GitHubLogin("another-jd"),
        subscription.Subscription(redis_cache, 0, False, "", frozenset()),
        client,
        redis_cache,
    )
    repository = context.Repository(
        installation,
        github_types.GitHubRepositoryName("name"),
        github_types.GitHubRepositoryIdType(123321),
    )
    ctxt = await context.Context.create(
        repository,
        github_types.GitHubPullRequest(
            {
                "id": github_types.GitHubPullRequestId(0),
                "number": github_types.GitHubPullRequestNumber(1),
                "commits": 1,
                "html_url": "<html_url>",
                "merge_commit_sha": None,
                "maintainer_can_modify": True,
                "rebaseable": True,
                "state": "closed",
                "merged_by": None,
                "merged_at": None,
                "merged": False,
                "draft": False,
                "mergeable_state": "unstable",
                "labels": [],
                "changed_files": 1,
                "base": {
                    "label": "repo",
                    "ref": github_types.GitHubRefType("master"),
                    "repo": {
                        "id": github_types.GitHubRepositoryIdType(123321),
                        "name": github_types.GitHubRepositoryName("name"),
                        "full_name": "another-jd/name",
                        "private": False,
                        "archived": False,
                        "url": "",
                        "default_branch": github_types.GitHubRefType(""),
                        "owner": {
                            "login": github_types.GitHubLogin("another-jd"),
                            "id": github_types.GitHubAccountIdType(2644),
                            "type": "User",
                            "avatar_url": "",
                        },
                    },
                    "user": {
                        "login": github_types.GitHubLogin("another-jd"),
                        "id": github_types.GitHubAccountIdType(2644),
                        "type": "User",
                        "avatar_url": "",
                    },
                    "sha": github_types.SHAType("mew"),
                },
                "head": {
                    "label": "foo",
                    "ref": github_types.GitHubRefType("myfeature"),
                    "sha": github_types.SHAType("<sha>"),
                    "repo": {
                        "id": github_types.GitHubRepositoryIdType(123321),
                        "name": github_types.GitHubRepositoryName("head"),
                        "full_name": "another-jd/head",
                        "private": False,
                        "archived": False,
                        "url": "",
                        "default_branch": github_types.GitHubRefType(""),
                        "owner": {
                            "login": github_types.GitHubLogin("another-jd"),
                            "id": github_types.GitHubAccountIdType(2644),
                            "type": "User",
                            "avatar_url": "",
                        },
                    },
                    "user": {
                        "login": github_types.GitHubLogin("another-jd"),
                        "id": github_types.GitHubAccountIdType(2644),
                        "type": "User",
                        "avatar_url": "",
                    },
                },
                "title": "My awesome job",
                "user": {
                    "login": github_types.GitHubLogin("another-jd"),
                    "id": github_types.GitHubAccountIdType(2644),
                    "type": "User",
                    "avatar_url": "",
                },
            }
        ),
    )
    # Empty conditions
    pull_request_rules = rules.PullRequestRules(
        [rules.Rule(name="default", conditions=rules.RuleConditions([]), actions={})]
    )
    match = await pull_request_rules.get_pull_request_rule(ctxt)
    assert [r.name for r in match.rules] == ["default"]
    assert [r.name for r in match.matching_rules] == ["default"]
    assert [
        rules.EvaluatedRule.from_rule(r, rules.RuleMissingConditions([]))
        for r in match.rules
    ] == match.matching_rules
    for rule in match.rules:
        assert rule.actions == {}

    pull_request_rules = pull_request_rule_from_list(
        [{"name": "hello", "conditions": ["base:master"], "actions": {}}]
    )
    match = await pull_request_rules.get_pull_request_rule(ctxt)
    assert [r.name for r in match.rules] == ["hello"]
    assert [r.name for r in match.matching_rules] == ["hello"]
    assert [
        rules.EvaluatedRule.from_rule(r, rules.RuleMissingConditions([]))
        for r in match.rules
    ] == match.matching_rules
    for rule in match.rules:
        assert rule.actions == {}

    pull_request_rules = pull_request_rule_from_list(
        [
            {"name": "hello", "conditions": ["base:master"], "actions": {}},
            {"name": "backport", "conditions": ["base:master"], "actions": {}},
        ]
    )
    match = await pull_request_rules.get_pull_request_rule(ctxt)
    assert [r.name for r in match.rules] == ["hello", "backport"]
    assert [r.name for r in match.matching_rules] == ["hello", "backport"]
    assert [
        rules.EvaluatedRule.from_rule(r, rules.RuleMissingConditions([]))
        for r in match.rules
    ] == match.matching_rules
    for rule in match.rules:
        assert rule.actions == {}

    pull_request_rules = pull_request_rule_from_list(
        [
            {"name": "hello", "conditions": ["author:foobar"], "actions": {}},
            {"name": "backport", "conditions": ["base:master"], "actions": {}},
        ]
    )
    match = await pull_request_rules.get_pull_request_rule(ctxt)
    assert [r.name for r in match.rules] == ["hello", "backport"]
    assert [r.name for r in match.matching_rules] == ["backport"]
    for rule in match.rules:
        assert rule.actions == {}

    pull_request_rules = pull_request_rule_from_list(
        [
            {"name": "hello", "conditions": ["author:another-jd"], "actions": {}},
            {"name": "backport", "conditions": ["base:master"], "actions": {}},
        ]
    )
    match = await pull_request_rules.get_pull_request_rule(ctxt)
    assert [r.name for r in match.rules] == ["hello", "backport"]
    assert [r.name for r in match.matching_rules] == ["hello", "backport"]
    assert [
        rules.EvaluatedRule.from_rule(r, rules.RuleMissingConditions([]))
        for r in match.rules
    ] == match.matching_rules
    for rule in match.rules:
        assert rule.actions == {}

    # No match
    pull_request_rules = pull_request_rule_from_list(
        [
            {
                "name": "merge",
                "conditions": [
                    "base=xyz",
                    "check-success=continuous-integration/fake-ci",
                    "#approved-reviews-by>=1",
                ],
                "actions": {},
            }
        ]
    )
    match = await pull_request_rules.get_pull_request_rule(ctxt)
    assert [r.name for r in match.rules] == ["merge"]
    assert [r.name for r in match.matching_rules] == []

    pull_request_rules = pull_request_rule_from_list(
        [
            {
                "name": "merge",
                "conditions": [
                    "base=master",
                    "check-success=continuous-integration/fake-ci",
                    "#approved-reviews-by>=1",
                ],
                "actions": {},
            }
        ]
    )
    match = await pull_request_rules.get_pull_request_rule(ctxt)
    assert [r.name for r in match.rules] == ["merge"]
    assert [r.name for r in match.matching_rules] == ["merge"]
    assert [
        rules.EvaluatedRule.from_rule(r, rules.RuleMissingConditions([]))
        for r in match.rules
    ] == match.matching_rules
    for rule in match.rules:
        assert rule.actions == {}

    pull_request_rules = pull_request_rule_from_list(
        [
            {
                "name": "merge",
                "conditions": [
                    "base=master",
"check-success=continuous-integration/fake-ci", "#approved-reviews-by>=2", ], "actions": {}, }, { "name": "fast merge", "conditions": [ "base=master", "label=fast-track", "check-success=continuous-integration/fake-ci", "#approved-reviews-by>=1", ], "actions": {}, }, { "name": "fast merge with alternate ci", "conditions": [ "base=master", "label=fast-track", "check-success=continuous-integration/fake-ci-bis", "#approved-reviews-by>=1", ], "actions": {}, }, { "name": "fast merge from a bot", "conditions": [ "base=master", "author=mybot", "check-success=continuous-integration/fake-ci", ], "actions": {}, }, ]) match = await pull_request_rules.get_pull_request_rule(ctxt) assert [r.name for r in match.rules] == [ "merge", "fast merge", "fast merge with alternate ci", "fast merge from a bot", ] assert [r.name for r in match.matching_rules] == [ "merge", "fast merge", "fast merge with alternate ci", ] for rule in match.rules: assert rule.actions == {} assert match.matching_rules[0].name == "merge" assert len(match.matching_rules[0].missing_conditions) == 1 assert (str(match.matching_rules[0].missing_conditions[0]) == "#approved-reviews-by>=2") assert match.matching_rules[1].name == "fast merge" assert len(match.matching_rules[1].missing_conditions) == 1 assert str( match.matching_rules[1].missing_conditions[0]) == "label=fast-track" assert match.matching_rules[2].name == "fast merge with alternate ci" assert len(match.matching_rules[2].missing_conditions) == 2 assert str( match.matching_rules[2].missing_conditions[0]) == "label=fast-track" assert (str(match.matching_rules[2].missing_conditions[1]) == "check-success=continuous-integration/fake-ci-bis") # Team conditions with one review missing pull_request_rules = pull_request_rule_from_list([{ "name": "default", "conditions": [ "approved-reviews-by=@another-jd/my-reviewers", "#approved-reviews-by>=2", ], "actions": {}, }]) match = await pull_request_rules.get_pull_request_rule(ctxt) assert [r.name for r in match.rules] == ["default"] assert [r.name for r in match.matching_rules] == ["default"] assert match.matching_rules[0].name == "default" assert len(match.matching_rules[0].missing_conditions) == 1 assert (str(match.matching_rules[0].missing_conditions[0]) == "#approved-reviews-by>=2") get_reviews.append({ "user": { "login": "******", "id": 2644, "type": "User" }, "state": "APPROVED", "author_association": "MEMBER", }) del ctxt._cache["reviews"] del ctxt._cache["consolidated_reviews"] # Team conditions with no review missing pull_request_rules = pull_request_rule_from_list([{ "name": "default", "conditions": [ "approved-reviews-by=@another-jd/my-reviewers", "#approved-reviews-by>=2", ], "actions": {}, }]) match = await pull_request_rules.get_pull_request_rule(ctxt) assert [r.name for r in match.rules] == ["default"] assert [r.name for r in match.matching_rules] == ["default"] assert match.matching_rules[0].name == "default" assert len(match.matching_rules[0].missing_conditions) == 0 # Forbidden labels, when no label set pull_request_rules = pull_request_rule_from_list([{ "name": "default", "conditions": ["-label~=^(status/wip|status/blocked|review/need2)$"], "actions": {}, }]) match = await pull_request_rules.get_pull_request_rule(ctxt) assert [r.name for r in match.rules] == ["default"] assert [r.name for r in match.matching_rules] == ["default"] assert match.matching_rules[0].name == "default" assert len(match.matching_rules[0].missing_conditions) == 0 # Forbidden labels, when forbiden label set ctxt.pull["labels"] = [{ "id": 0, "color": 
"#1234", "default": False, "name": "status/wip" }] match = await pull_request_rules.get_pull_request_rule(ctxt) assert [r.name for r in match.rules] == ["default"] assert [r.name for r in match.matching_rules] == ["default"] assert match.matching_rules[0].name == "default" assert len(match.matching_rules[0].missing_conditions) == 1 assert str(match.matching_rules[0].missing_conditions[0]) == ( "-label~=^(status/wip|status/blocked|review/need2)$") # Forbidden labels, when other label set ctxt.pull["labels"] = [{ "id": 0, "color": "#1234", "default": False, "name": "allowed" }] match = await pull_request_rules.get_pull_request_rule(ctxt) assert [r.name for r in match.rules] == ["default"] assert [r.name for r in match.matching_rules] == ["default"] assert match.matching_rules[0].name == "default" assert len(match.matching_rules[0].missing_conditions) == 0 # Test team expander pull_request_rules = pull_request_rule_from_list([{ "name": "default", "conditions": ["author~=^(user1|user2|another-jd)$"], "actions": {}, }]) match = await pull_request_rules.get_pull_request_rule(ctxt) assert [r.name for r in match.rules] == ["default"] assert [r.name for r in match.matching_rules] == ["default"] assert match.matching_rules[0].name == "default" assert len(match.matching_rules[0].missing_conditions) == 0
def mock_cog(cog):
    cog.send_reply = mock.AsyncMock(return_value=ANOTHER_MESSAGE_MOCK)
    cog.on_cog_command_error = mock.AsyncMock()
    return cog
def __init__(self, message_id):
    self.id = message_id
    self.delete = mock.AsyncMock()
    self.reactions = []
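# A short usage sketch (hedged: assumes the __init__ above belongs to a
# MessageMock class standing in for a discord.Message; the class name is
# hypothetical). Awaiting the AsyncMock records the call for assertions:
async def test_delete_is_awaited():
    message = MessageMock(message_id=42)
    await message.delete()
    message.delete.assert_awaited_once()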
async def test():
    connector = mock.AsyncMock(return_value=mock.Mock())
    pool = await Pool.setup(connector, size=5)

    assert len(pool.pool) == 5
    assert connector.call_count == 5
async def _connector():
    con = mock.Mock()
    con.ping = mock.AsyncMock(return_value=ping)
    con.close = mock.AsyncMock()
    return con
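# A hypothetical reconstruction of the Pool API these tests exercise — a
# minimal sketch, not the real implementation: setup() awaits the connector
# once per slot and keeps the resulting connections in .pool, which is what
# the call_count and len(pool.pool) assertions above rely on.
class Pool:
    def __init__(self, connections):
        self.pool = connections

    @classmethod
    async def setup(cls, connector, size):
        # await the (possibly mocked) connector coroutine once per slot
        return cls([await connector() for _ in range(size)])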
async def test_summary_synchronization_cache(
    redis_cache: utils.RedisCache,
) -> None:
    gh_owner = github_types.GitHubAccount(
        {
            "login": github_types.GitHubLogin("user"),
            "id": github_types.GitHubAccountIdType(0),
            "type": "User",
            "avatar_url": "",
        }
    )
    gh_repo = github_types.GitHubRepository(
        {
            "archived": False,
            "url": "",
            "default_branch": github_types.GitHubRefType(""),
            "id": github_types.GitHubRepositoryIdType(456),
            "full_name": "user/ref",
            "name": github_types.GitHubRepositoryName("name"),
            "private": False,
            "owner": gh_owner,
        }
    )

    async def items(*args, **kwargs):
        # an async generator that yields nothing
        if False:
            yield
        return

    async def post_check(*args, **kwargs):
        return mock.Mock()

    client = mock.AsyncMock()
    client.auth.get_access_token.return_value = "<token>"
    client.items = items
    client.post.side_effect = post_check

    sub = subscription.Subscription(redis_cache, 0, False, "", frozenset())
    installation = context.Installation(
        gh_owner["id"],
        gh_owner["login"],
        sub,
        client,
        redis_cache,
    )
    repository = context.Repository(installation, gh_repo["name"], gh_repo["id"])
    ctxt = await context.Context.create(
        repository,
        {
            "title": "",
            "id": github_types.GitHubPullRequestId(0),
            "maintainer_can_modify": False,
            "rebaseable": False,
            "draft": False,
            "merge_commit_sha": None,
            "labels": [],
            "number": github_types.GitHubPullRequestNumber(6),
            "commits": 1,
            "merged": True,
            "state": "closed",
            "changed_files": 1,
            "html_url": "<html_url>",
            "base": {
                "label": "",
                "sha": github_types.SHAType("sha"),
                "user": {
                    "login": github_types.GitHubLogin("user"),
                    "id": github_types.GitHubAccountIdType(0),
                    "type": "User",
                    "avatar_url": "",
                },
                "ref": github_types.GitHubRefType("ref"),
                "repo": gh_repo,
            },
            "head": {
                "label": "",
                "sha": github_types.SHAType("old-sha-one"),
                "ref": github_types.GitHubRefType("fork"),
                "user": {
                    "login": github_types.GitHubLogin("user"),
                    "id": github_types.GitHubAccountIdType(0),
                    "type": "User",
                    "avatar_url": "",
                },
                "repo": {
                    "archived": False,
                    "url": "",
                    "default_branch": github_types.GitHubRefType(""),
                    "id": github_types.GitHubRepositoryIdType(123),
                    "full_name": "fork/other",
                    "name": github_types.GitHubRepositoryName("other"),
                    "private": False,
                    "owner": {
                        "login": github_types.GitHubLogin("user"),
                        "id": github_types.GitHubAccountIdType(0),
                        "type": "User",
                        "avatar_url": "",
                    },
                },
            },
            "user": {
                "login": github_types.GitHubLogin("user"),
                "id": github_types.GitHubAccountIdType(0),
                "type": "User",
                "avatar_url": "",
            },
            "merged_by": None,
            "merged_at": None,
            "mergeable_state": "clean",
        },
    )

    assert await ctxt.get_cached_last_summary_head_sha() is None
    await ctxt.set_summary_check(
        check_api.Result(check_api.Conclusion.SUCCESS, "foo", "bar")
    )
    assert await ctxt.get_cached_last_summary_head_sha() == "old-sha-one"

    await ctxt.clear_cached_last_summary_head_sha()

    assert await ctxt.get_cached_last_summary_head_sha() is None
async def test_on_error_callback_coro(self):
    cb = mock.AsyncMock()
    await as_complete_failer(self.raise_error(), on_fail_callback=cb)
    cb.assert_called_once()
class TestSyncer(Syncer):
    """Syncer subclass with mocks for abstract methods for testing purposes."""

    name = "test"
    _get_diff = mock.AsyncMock()
    _sync = mock.AsyncMock()
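# Note that the AsyncMocks above are class attributes, so every TestSyncer
# instance shares the same mocks. A minimal sketch of resetting them between
# tests (assuming a standard unittest async test case; the test class name
# is hypothetical):
class SyncerTests(unittest.IsolatedAsyncioTestCase):
    def setUp(self):
        TestSyncer._get_diff.reset_mock(return_value=True, side_effect=True)
        TestSyncer._sync.reset_mock(return_value=True, side_effect=True)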
async def test_end_to_end_send_only_single_metadata_data():
    response = mock.Mock()
    response.status_code = 200
    post_mock = mock.AsyncMock(return_value=response)

    with mock.patch(
        "hetdesrun.adapters.generic_rest.send_metadata.get_generic_rest_adapter_base_url",
        return_value="https://hetida.de",
    ):
        with mock.patch(
            "hetdesrun.adapters.generic_rest.send_metadata.httpx.AsyncClient.post",
            new=post_mock,
        ):
            # a single metadata value
            await send_data(
                {
                    "inp_1": FilteredSink(
                        ref_id="sink_id_1",
                        type="metadata(int)",
                        ref_id_type="SOURCE",
                        ref_key="number",
                        filters={},
                    )
                },
                {"inp_1": 55},
                adapter_key="test_end_to_end_send_only_metadata_data_adapter_key",
            )
            assert post_mock.called  # we got through to actually posting!

            func_name, args, kwargs = post_mock.mock_calls[0]
            assert kwargs["json"] == {"key": "number", "value": 55, "dataType": "int"}
            assert args[0] == "https://hetida.de/sources/sink_id_1/metadata/number"

            response.status_code = 400
            with pytest.raises(AdapterConnectionError):
                await send_data(
                    {
                        "inp_1": FilteredSink(
                            ref_id="sink_id_1",
                            type="metadata(int)",
                            ref_id_type="SOURCE",
                            ref_key="number",
                            filters={},
                        )
                    },
                    {"inp_1": 55},
                    adapter_key="test_end_to_end_send_only_metadata_data_adapter_key",
                )
def test_init(test, endpoint_value, uid):
    api = USPSApi(uid, mock.AsyncMock(), test)
    assert api.api_user_id == uid
    assert api.test == test
    assert api.test_endpoint == endpoint_value
"test model", ) zha_device = await zha_device_restored(zigpy_dev) return zha_device def _send_time_changed(hass, seconds): """Send a time changed event.""" now = dt_util.utcnow() + timedelta(seconds=seconds) async_fire_time_changed(hass, now) @patch( "homeassistant.components.zha.core.channels.general.BasicChannel.async_initialize", new=mock.AsyncMock(), ) async def test_check_available_success(hass, device_with_basic_channel, zha_device_restored): """Check device availability success on 1st try.""" # pylint: disable=protected-access zha_device = await zha_device_restored(device_with_basic_channel) await async_enable_traffic(hass, [zha_device]) basic_ch = device_with_basic_channel.endpoints[3].basic basic_ch.read_attributes.reset_mock() device_with_basic_channel.last_seen = None assert zha_device.available is True _send_time_changed(hass, zha_device.consider_unavailable_time + 2) await hass.async_block_till_done()
def api():
    return USPSApi('xxxxxxx', mock.AsyncMock(), False)
async def test_set_day(event_loop):
    ctx = mock.AsyncMock()
    await db.init_calendar(ctx, 'amosian', 'amosian')
    ctx.send.assert_called_once_with('Calendar initialized as amosian')
    await db.set_day(ctx, '2000')
def test_api():
    return USPSApi('xxxxxxx', mock.AsyncMock(), True)
def fake_client() -> mock.Mock:
    async def items_call(url, *args, **kwargs):
        if url == "/repos/Mergifyio/mergify-engine/commits/the-head-sha/status":
            return
        elif url == "/repos/Mergifyio/mergify-engine/commits/the-head-sha/check-runs":
            yield github_types.GitHubCheckRun(
                {
                    "head_sha": "ce587453ced02b1526dfb4cb910479d431683101",
                    "details_url": "https://example.com",
                    "status": "completed",
                    "conclusion": "failure",
                    "name": "failure",
                    "id": 1234,
                    "app": {
                        "id": 1234,
                        "name": "CI",
                        "owner": {
                            "type": "User",
                            "id": 1234,
                            "login": "******",
                            "avatar_url": "https://example.com",
                        },
                    },
                    "external_id": None,
                    "pull_requests": [],
                    "before": "4eef79d038b0327a5e035fd65059e556a55c6aa4",
                    "after": "4eef79d038b0327a5e035fd65059e556a55c6aa4",
                    "started_at": "",
                    "completed_at": "",
                    "html_url": "https://example.com",
                    "check_suite": {"id": 1234},
                    "output": {
                        "summary": "",
                        "title": "It runs!",
                        "text": "",
                        "annotations": [],
                        "annotations_count": 0,
                        "annotations_url": "https://example.com",
                    },
                }
            )
            yield github_types.GitHubCheckRun(
                {
                    "head_sha": "ce587453ced02b1526dfb4cb910479d431683101",
                    "details_url": "https://example.com",
                    "status": "completed",
                    "conclusion": "success",
                    "name": "success",
                    "id": 1235,
                    "app": {
                        "id": 1234,
                        "name": "CI",
                        "owner": {
                            "type": "User",
                            "id": 1234,
                            "login": "******",
                            "avatar_url": "https://example.com",
                        },
                    },
                    "external_id": None,
                    "pull_requests": [],
                    "before": "4eef79d038b0327a5e035fd65059e556a55c6aa4",
                    "after": "4eef79d038b0327a5e035fd65059e556a55c6aa4",
                    "started_at": "",
                    "completed_at": "",
                    "html_url": "https://example.com",
                    "check_suite": {"id": 1234},
                    "output": {
                        "summary": "",
                        "title": "It runs!",
                        "text": "",
                        "annotations": [],
                        "annotations_count": 0,
                        "annotations_url": "https://example.com",
                    },
                }
            )
            yield github_types.GitHubCheckRun(
                {
                    "head_sha": "ce587453ced02b1526dfb4cb910479d431683101",
                    "details_url": "https://example.com",
                    "status": "completed",
                    "conclusion": "neutral",
                    "name": "neutral",
                    "id": 1236,
                    "app": {
                        "id": 1234,
                        "name": "CI",
                        "owner": {
                            "type": "User",
                            "id": 1234,
                            "login": "******",
                            "avatar_url": "https://example.com",
                        },
                    },
                    "external_id": None,
                    "pull_requests": [],
                    "before": "4eef79d038b0327a5e035fd65059e556a55c6aa4",
                    "after": "4eef79d038b0327a5e035fd65059e556a55c6aa4",
                    "started_at": "",
                    "completed_at": "",
                    "html_url": "https://example.com",
                    "check_suite": {"id": 1234},
                    "output": {
                        "summary": "",
                        "title": "It runs!",
                        "text": "",
                        "annotations": [],
                        "annotations_count": 0,
                        "annotations_url": "https://example.com",
                    },
                }
            )
            yield github_types.GitHubCheckRun(
                {
                    "head_sha": "ce587453ced02b1526dfb4cb910479d431683101",
                    "details_url": "https://example.com",
                    "status": "in_progress",
                    "conclusion": None,
                    "name": "pending",
                    "id": 1237,
                    "app": {
                        "id": 1234,
                        "name": "CI",
                        "owner": {
                            "type": "User",
                            "id": 1234,
                            "login": "******",
                            "avatar_url": "https://example.com",
                        },
                    },
                    "external_id": None,
                    "pull_requests": [],
                    "before": "4eef79d038b0327a5e035fd65059e556a55c6aa4",
                    "after": "4eef79d038b0327a5e035fd65059e556a55c6aa4",
                    "started_at": "",
                    "completed_at": "",
                    "html_url": "https://example.com",
                    "check_suite": {"id": 1234},
                    "output": {
                        "summary": "",
                        "title": "It runs!",
                        "text": "",
                        "annotations": [],
                        "annotations_count": 0,
                        "annotations_url": "https://example.com",
                    },
                }
            )
        else:
            raise Exception(f"url not mocked: {url}")

    def item_call(url, *args, **kwargs):
        if url == "/repos/Mergifyio/mergify-engine/branches/main":
            return {"commit": {"sha": "sha1"}, "protection": {"enabled": False}}
        if url == "/repos/Mergifyio/mergify-engine/branches/main/protection":
            raise http.HTTPNotFound(
                message="boom", response=mock.Mock(), request=mock.Mock()
            )
        else:
            raise Exception(f"url not mocked: {url}")

    client = mock.Mock()
    client.item = mock.AsyncMock(side_effect=item_call)
    client.items = items_call
    return client
def setup_mock_(client_constructor):
    grpc_client = mock.AsyncMock()
    client_constructor.return_value = grpc_client
    return grpc_client
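# Usage sketch for the helper above (hedged: the patched "some_pkg.Client"
# path and the create_job RPC are assumptions for illustration, not part of
# the snippet). Every method of the AsyncMock is awaitable, so stubbed RPCs
# can be awaited and asserted on directly:
@mock.patch("some_pkg.Client")
async def test_create_job(client_constructor):
    grpc_client = setup_mock_(client_constructor)
    grpc_client.create_job.return_value = {"id": "job-1"}
    result = await grpc_client.create_job(payload={})
    assert result == {"id": "job-1"}
    grpc_client.create_job.assert_awaited_once_with(payload={})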
async def test_signals(redis_cache):
    gh_owner = github_types.GitHubAccount(
        {
            "login": github_types.GitHubLogin("user"),
            "id": github_types.GitHubAccountIdType(0),
            "type": "User",
            "avatar_url": "",
        }
    )
    gh_repo = github_types.GitHubRepository(
        {
            "archived": False,
            "url": "",
            "default_branch": github_types.GitHubRefType(""),
            "id": github_types.GitHubRepositoryIdType(456),
            "full_name": "user/ref",
            "name": github_types.GitHubRepositoryName("name"),
            "private": False,
            "owner": gh_owner,
        }
    )
    client = mock.AsyncMock()
    client.auth.get_access_token.return_value = "<token>"
    sub = subscription.Subscription(redis_cache, 0, False, "", frozenset())
    installation = context.Installation(
        gh_owner["id"],
        gh_owner["login"],
        sub,
        client,
        redis_cache,
    )
    repository = context.Repository(installation, gh_repo["name"], gh_repo["id"])
    ctxt = await context.Context.create(
        repository,
        {
            "title": "",
            "id": github_types.GitHubPullRequestId(0),
            "maintainer_can_modify": False,
            "rebaseable": False,
            "draft": False,
            "merge_commit_sha": None,
            "labels": [],
            "number": github_types.GitHubPullRequestNumber(6),
            "commits": 1,
            "merged": True,
            "state": "closed",
            "changed_files": 1,
            "html_url": "<html_url>",
            "base": {
                "label": "",
                "sha": github_types.SHAType("sha"),
                "user": {
                    "login": github_types.GitHubLogin("user"),
                    "id": github_types.GitHubAccountIdType(0),
                    "type": "User",
                    "avatar_url": "",
                },
                "ref": github_types.GitHubRefType("ref"),
                "repo": gh_repo,
            },
            "head": {
                "label": "",
                "sha": github_types.SHAType("old-sha-one"),
                "ref": github_types.GitHubRefType("fork"),
                "user": {
                    "login": github_types.GitHubLogin("user"),
                    "id": github_types.GitHubAccountIdType(0),
                    "type": "User",
                    "avatar_url": "",
                },
                "repo": {
                    "archived": False,
                    "url": "",
                    "default_branch": github_types.GitHubRefType(""),
                    "id": github_types.GitHubRepositoryIdType(123),
                    "full_name": "fork/other",
                    "name": github_types.GitHubRepositoryName("other"),
                    "private": False,
                    "owner": {
                        "login": github_types.GitHubLogin("user"),
                        "id": github_types.GitHubAccountIdType(0),
                        "type": "User",
                        "avatar_url": "",
                    },
                },
            },
            "user": {
                "login": github_types.GitHubLogin("user"),
                "id": github_types.GitHubAccountIdType(0),
                "type": "User",
                "avatar_url": "",
            },
            "merged_by": None,
            "merged_at": None,
            "mergeable_state": "clean",
        },
    )

    assert len(signals.SIGNALS) == 0
    signals.setup()
    assert len(signals.SIGNALS) == 1

    with mock.patch("mergify_engine_signals.noop.Signal.__call__") as signal_method:
        await signals.send(ctxt, "action.update")
        signal_method.assert_called_once_with(ctxt, "action.update")
async def test():
    connector = mock.AsyncMock(return_value=mock.Mock())
    pool = await Pool.setup(connector, size=1)

    assert len(pool.pool) == 1
    connector.assert_called_once()