Code example #1
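# Builds QueueRuleConditions from a list of condition strings (several of them
# schedule= conditions) and checks how each one is rendered in the summary for
# a fake pull request.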
async def test_rules_conditions_schedule():
    pulls = [
        FakeQueuePullRequest({
            "number": 1,
            "author": "me",
            "base": "main",
            "current-timestamp": date.utcnow(),
            "current-time": date.utcnow(),
            "current-day": date.Day(22),
            "current-month": date.Month(9),
            "current-year": date.Year(2021),
            "current-day-of-week": date.DayOfWeek(3),
        }),
    ]
    schema = voluptuous.Schema(
        voluptuous.All(
            [voluptuous.Coerce(rules.RuleConditionSchema)],
            voluptuous.Coerce(conditions.QueueRuleConditions),
        ))

    c = schema([
        "base=main",
        "schedule=MON-FRI 08:00-17:00",
        "schedule=MONDAY-FRIDAY 10:00-12:00",
        "schedule=SAT-SUN 07:00-12:00",
    ])

    await c(pulls)

    assert (c.get_summary() == """- [X] `base=main`
- [X] `schedule=MON-FRI 08:00-17:00`
- [ ] `schedule=MONDAY-FRIDAY 10:00-12:00`
- [ ] `schedule=SAT-SUN 07:00-12:00`
""")
Code example #2
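# Coerces the supported date helper types (RelativeDatetime, timedelta,
# PartialDatetime subclasses, Time) into a concrete datetime for comparison;
# anything unsupported falls back to date.DT_MAX.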
def _as_datetime(value: typing.Any) -> datetime.datetime:
    if isinstance(value, datetime.datetime):
        return value
    elif isinstance(value, date.RelativeDatetime):
        return value.value
    elif isinstance(value, datetime.timedelta):
        dt = date.utcnow()
        return dt + value
    elif isinstance(value, date.PartialDatetime):
        dt = date.utcnow().replace(
            hour=0,
            minute=0,
            second=0,
            microsecond=0,
        )
        if isinstance(value, date.DayOfWeek):
            return dt + datetime.timedelta(days=value.value - dt.isoweekday())
        elif isinstance(value, date.Day):
            return dt.replace(day=value.value)
        elif isinstance(value, date.Month):
            return dt.replace(month=value.value, day=1)
        elif isinstance(value, date.Year):
            return dt.replace(year=value.value, month=1, day=1)
        else:
            return date.DT_MAX
    elif isinstance(value, date.Time):
        return date.utcnow().replace(
            hour=value.hour,
            minute=value.minute,
            second=0,
            microsecond=0,
            tzinfo=value.tzinfo,
        )
    else:
        return date.DT_MAX
Code example #3
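# Test helper: a FakePR whose time-related attributes all reflect the current
# UTC moment.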
def get_scheduled_pr() -> FakePR:
    return FakePR(
        {
            "current-day-of-week": date.DayOfWeek(date.utcnow().isoweekday()),
            "current-year": date.Year(date.utcnow().year),
            "current-day": date.Day(date.utcnow().day),
            "number": 3433,
            "current-time": date.utcnow(),
            "current-month": date.Month(date.utcnow().month),
        }
    )
Code example #4
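# Test double for the HTTP mock: the first call answers 429 with a Retry-After
# header, later calls answer 200; every call records when it happened.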
def record_date(_):
    if records:
        records.append(date.utcnow())
        return httpx.Response(200, text="It works now !")
    else:
        records.append(date.utcnow())
        return httpx.Response(
            429,
            text="This is a 429 error",
            headers={"Retry-After": retry_after},
        )
Code example #5
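# Estimates, per rule, the nearest datetime at which its conditions could
# change, then plans (or cancels) a delayed refresh of the pull request
# accordingly.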
async def plan_next_refresh(
    ctxt: context.Context,
    _rules: typing.Union[typing.List[rules.EvaluatedRule],
                         typing.List[rules.EvaluatedQueueRule]],
    pull_request: context.BasePullRequest,
) -> None:
    best_bet = await _get_current_refresh_datetime(ctxt.repository,
                                                   ctxt.pull["number"])
    if best_bet is not None and best_bet < date.utcnow():
        best_bet = None

    for rule in _rules:
        f = filter.NearDatetimeFilter(
            rule.conditions.extract_raw_filter_tree())
        live_resolvers.configure_filter(ctxt.repository, f)
        try:
            bet = await f(pull_request)
        except live_resolvers.LiveResolutionFailure:
            continue
        if best_bet is None or best_bet > bet:
            best_bet = bet

    if best_bet is None or best_bet >= date.DT_MAX:
        zset_subkey = _redis_key(ctxt.repository, ctxt.pull["number"])
        removed = await ctxt.redis.zrem(DELAYED_REFRESH_KEY, zset_subkey)
        if removed is not None and removed > 0:
            ctxt.log.info("unplan to refresh pull request")
    else:
        await _set_current_refresh_datetime(ctxt.repository,
                                            ctxt.pull["number"], best_bet)
        ctxt.log.info("plan to refresh pull request",
                      refresh_planned_at=best_bet.isoformat())
Code example #6
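# Runs or cancels a rule action. Unexpected exceptions are logged, a refresh is
# planned in 5 minutes, and a PENDING result is returned instead of crashing.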
async def exec_action(
    method_name: typing.Literal["run", "cancel"],
    rule: rules.EvaluatedRule,
    action: str,
    ctxt: context.Context,
) -> check_api.Result:
    try:
        if method_name == "run":
            method = rule.actions[action].run
        elif method_name == "cancel":
            method = rule.actions[action].cancel
        else:
            raise RuntimeError("wrong method_name")
        return await method(ctxt, rule)
    except Exception as e:  # pragma: no cover
        # Forward those to worker
        if (
            exceptions.should_be_ignored(e)
            or exceptions.need_retry(e)
            or isinstance(e, exceptions.UnprocessablePullRequest)
        ):
            raise
        # NOTE(sileht): the action failed, which is a bug, so just set the
        # result as pending and retry in 5 minutes...
        ctxt.log.error("action failed", action=action, rule=rule, exc_info=True)
        await delayed_refresh.plan_refresh_at_least_at(
            ctxt.repository,
            ctxt.pull["number"],
            date.utcnow() + datetime.timedelta(minutes=5),
        )
        return check_api.Result(
            check_api.Conclusion.PENDING,
            f"Action '{action}' has unexpectedly failed, Mergify team is working on it, the state will be refreshed automatically.",
            "",
        )
Code example #7
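# Retry-After sent as an absolute HTTP date: the retry should happen at roughly
# that time (with 1 second of tolerance, since http_date drops milliseconds).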
async def test_client_retry_429_retry_after_as_absolute_date(
    respx_mock: respx.MockRouter,
) -> None:
    expected_retry = date.utcnow() + datetime.timedelta(seconds=2)
    retry_after = http_date(expected_retry)
    when = await _do_test_client_retry_429(respx_mock, retry_after)
    # ms are cut by http_date, so we allow a 1 second delta :(
    assert when >= expected_retry - datetime.timedelta(seconds=1)
Code example #8
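# Logs and pushes a branch refresh event; the stream score is derived from the
# current UTC timestamp.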
async def send_branch_refresh(
    redis_cache: RedisCache,
    redis_stream: RedisStream,
    repository: github_types.GitHubRepository,
    action: github_types.GitHubEventRefreshActionType,
    ref: github_types.GitHubRefType,
    source: str,
) -> None:
    LOG.info(
        "sending repository branch refresh",
        gh_owner=repository["owner"]["login"],
        gh_repo=repository["name"],
        gh_private=repository["private"],
        gh_ref=ref,
        action=action,
        source=source,
    )
    score = str(date.utcnow().timestamp() * 10)
    await _send_refresh(redis_cache,
                        redis_stream,
                        repository,
                        action,
                        source,
                        ref=ref,
                        score=score)
Code example #9
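# The old EmbarkedPull serialization format must still deserialize into an
# equivalent object.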
def test_embarked_pull_old_serialization() -> None:
    queue_config = rules.QueueConfig(
        priority=0,
        speculative_checks=5,
        batch_size=1,
        batch_max_wait_time=datetime.timedelta(seconds=0),
        allow_inplace_checks=True,
        disallow_checks_interruption_from_queues=[],
        checks_timeout=None,
        draft_bot_account=None,
    )
    config = queue.PullQueueConfig(
        name=rules.QueueName("foo"),
        strict_method="merge",
        update_method="merge",
        priority=0,
        effective_priority=0,
        bot_account=None,
        update_bot_account=None,
        queue_config=queue_config,
    )

    now = date.utcnow()
    old_typed = merge_train.EmbarkedPull.OldSerialized(
        github_types.GitHubPullRequestNumber(1234), config, now
    )
    old_untyped = json.loads(json.dumps(old_typed))
    ep = merge_train.EmbarkedPull.deserialize(mock.Mock(), old_untyped)
    assert ep.user_pull_request_number == 1234
    assert ep.config == config
    assert ep.queued_at == now
Code example #10
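# Keeps only the event fields the engine actually needs, plus the sender
# identity and a received_at timestamp.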
def _extract_slim_event(event_type, data):
    slim_data = {
        "received_at": date.utcnow().isoformat(),
        "sender": {
            "id": data["sender"]["id"],
            "login": data["sender"]["login"],
            "type": data["sender"]["type"],
        },
    }

    if event_type == "status":
        # To get PR from sha
        slim_data["sha"] = data["sha"]
        slim_data["context"] = data["context"]

    elif event_type == "refresh":
        # To get PR from sha or branch name
        slim_data["action"] = data["action"]
        slim_data["ref"] = data["ref"]
        slim_data["pull_request_number"] = data["pull_request_number"]
        slim_data["source"] = data["source"]

    elif event_type == "push":
        # To get PR from sha
        slim_data["ref"] = data["ref"]
        slim_data["before"] = data["before"]
        slim_data["after"] = data["after"]
        slim_data["pusher"] = data["pusher"]

    elif event_type in ("check_suite", "check_run"):
        # To get PR from sha
        slim_data["action"] = data["action"]
        slim_data["app"] = {"id": data[event_type]["app"]["id"]}
        slim_data[event_type] = {
            "head_sha": data[event_type]["head_sha"],
            "pull_requests": [
                {
                    "number": p["number"],
                    "base": {"repo": {"url": p["base"]["repo"]["url"]}},
                }
                for p in data[event_type]["pull_requests"]
            ],
        }
        if event_type == "check_run":
            slim_data["check_run"]["name"] = data["check_run"]["name"]

    elif event_type == "pull_request":
        # For pull_request opened/synchronize/closed
        slim_data["action"] = data["action"]
        if slim_data["action"] == "synchronize":
            slim_data["before"] = data["before"]
            slim_data["after"] = data["after"]

    elif event_type == "issue_comment":
        # For commands runner
        slim_data["comment"] = data["comment"]

    return slim_data
Code example #11
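# Records the current time as the installation's "last seen" marker, expiring
# after RETENTION_SECONDS.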
async def update(
    ctxt: context.Context,
    event: signals.EventName,
    metadata: typing.Optional[signals.SignalMetadata],
) -> None:
    key = get_last_seen_key(ctxt.repository.installation.owner_id)
    now = date.utcnow().isoformat()
    await ctxt.redis.setex(key, RETENTION_SECONDS, now)
Code example #12
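# API endpoint: creates a queue freeze (or updates the reason of an existing
# one), stamping a newly created freeze with the current UTC time.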
async def create_queue_freeze(
    queue_freeze_payload: QueueFreezePayload,
    application: application_mod.Application = fastapi.Depends(  # noqa: B008
        security.get_application
    ),
    queue_name: rules.QueueName = fastapi.Path(  # noqa: B008
        ..., description="The name of the queue"
    ),
    repository_ctxt: context.Repository = fastapi.Depends(  # noqa: B008
        security.get_repository_context
    ),
) -> QueueFreezeResponse:

    if queue_freeze_payload.reason == "":
        queue_freeze_payload.reason = "No freeze reason was specified."

    config_file = await repository_ctxt.get_mergify_config_file()
    if config_file is None:
        raise fastapi.HTTPException(
            status_code=404, detail="Mergify configuration file is missing."
        )

    config = get_mergify_config(config_file)
    queue_rules = config["queue_rules"]
    if all(queue_name != rule.name for rule in queue_rules):
        raise fastapi.HTTPException(
            status_code=404, detail=f'The queue "{queue_name}" does not exist.'
        )

    qf = await freeze.QueueFreeze.get(repository_ctxt, queue_name)
    if qf is None:
        qf = freeze.QueueFreeze(
            repository=repository_ctxt,
            name=queue_name,
            reason=queue_freeze_payload.reason,
            application_name=application.name,
            application_id=application.id,
            freeze_date=date.utcnow(),
        )
        await qf.save()

    elif qf.reason != queue_freeze_payload.reason:
        qf.reason = queue_freeze_payload.reason
        await qf.save()

    return QueueFreezeResponse(
        queue_freezes=[
            QueueFreeze(
                name=qf.name,
                reason=qf.reason,
                application_name=qf.application_name,
                application_id=qf.application_id,
                freeze_date=qf.freeze_date,
            )
        ],
    )
Code example #13
File: last_seen.py, Project: CamClrt/mergify-engine
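# Signal handler that refreshes the installation's last-seen timestamp in
# Redis, with a RETENTION_SECONDS expiry.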
async def __call__(
    self,
    repository: "context.Repository",
    pull_request: github_types.GitHubPullRequestNumber,
    event: signals.EventName,
    metadata: signals.EventMetadata,
) -> None:
    key = get_last_seen_key(repository.installation.owner_id)
    now = date.utcnow().isoformat()
    await repository.installation.redis.cache.setex(
        key, RETENTION_SECONDS, now)
Code example #14
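# Posts the initial "Summary" check run as soon as a pull request event is
# received, unless Mergify is not enabled on the repository or a summary
# already exists.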
async def create_initial_summary(
        redis: utils.RedisCache,
        event: github_types.GitHubEventPullRequest) -> None:
    owner = event["repository"]["owner"]
    repo = event["pull_request"]["base"]["repo"]

    if not await redis.exists(
            context.Repository.get_config_location_cache_key(
                owner["login"],
                repo["name"],
            )):
        # Mergify is probably not activated on this repo
        return

    # NOTE(sileht): It's possible that a "push" event creates a summary before
    # we receive the pull_request/opened event, so we first check whether a
    # summary already exists, to avoid posting it twice. Since this method can
    # run in parallel with the worker this is not 100% reliable, but if we do
    # post a duplicate summary, check_api.set_check_run() handles this case and
    # updates both so users are not confused.
    summary_exists = await context.Context.summary_exists(
        redis, owner["id"], repo["id"], event["pull_request"])

    if summary_exists:
        return

    installation_json = await github.get_installation_from_account_id(
        owner["id"])
    async with github.aget_client(installation_json) as client:
        post_parameters = {
            "name": constants.SUMMARY_NAME,
            "head_sha": event["pull_request"]["head"]["sha"],
            "status": check_api.Status.IN_PROGRESS.value,
            "started_at": date.utcnow().isoformat(),
            "details_url": f"{event['pull_request']['html_url']}/checks",
            "output": {
                "title": "Your rules are under evaluation",
                "summary": "Be patient, the page will be updated soon.",
            },
            "external_id": str(event["pull_request"]["number"]),
        }
        try:
            await client.post(
                f"/repos/{event['pull_request']['base']['user']['login']}/{event['pull_request']['base']['repo']['name']}/check-runs",
                api_version="antiope",
                json=post_parameters,
            )
        except http.HTTPClientSideError as e:
            if e.status_code == 422 and "No commit found for SHA" in e.message:
                return
            raise
Code example #15
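# Pops every delayed-refresh entry whose score is due and pushes a refresh
# event for the corresponding pull request.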
async def send(
    redis_stream: utils.RedisStream,
    redis_cache: utils.RedisCache,
) -> None:
    score = date.utcnow().timestamp()
    keys = await redis_cache.zrangebyscore(DELAYED_REFRESH_KEY, "-inf", score)
    if not keys:
        return

    pipe = await redis_stream.pipeline()
    keys_to_delete = set()
    for subkey in keys:
        (
            owner_id_str,
            owner_login,
            repository_id_str,
            repository_name,
            pull_request_number_str,
        ) = subkey.split("~")
        owner_id = github_types.GitHubAccountIdType(int(owner_id_str))
        repository_id = github_types.GitHubRepositoryIdType(
            int(repository_id_str))
        pull_request_number = github_types.GitHubPullRequestNumber(
            int(pull_request_number_str))

        LOG.info(
            "sending delayed pull request refresh",
            gh_owner=owner_login,
            gh_repo=repository_name,
            action="internal",
            source="delayed-refresh",
        )

        await worker.push(
            pipe,
            owner_id,
            owner_login,
            repository_id,
            repository_name,
            pull_request_number,
            "refresh",
            {
                "action": "internal",
                "ref": None,
                "source": "delayed-refresh",
            },  # type: ignore[typeddict-item]
        )
        keys_to_delete.add(subkey)

    await pipe.execute()
    await redis_cache.zrem(DELAYED_REFRESH_KEY, *keys_to_delete)
Code example #16
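# BinaryFilter comparisons against full datetimes; dtime(n) builds today's
# datetime with the day replaced (see code example #21).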
async def test_datetime_binary() -> None:
    assert "foo>=2012-01-05T00:00:00" == str(
        filter.BinaryFilter({">=": ("foo", dtime(5))})
    )
    assert "foo<=2012-01-05T23:59:00" == str(
        filter.BinaryFilter({"<=": ("foo", dtime(5).replace(hour=23, minute=59))})
    )
    assert "foo<=2012-01-05T03:09:00" == str(
        filter.BinaryFilter({"<=": ("foo", dtime(5).replace(hour=3, minute=9))})
    )

    f = filter.BinaryFilter({"<=": ("foo", date.utcnow())})
    assert await f(FakePR({"foo": dtime(14)}))
    assert await f(FakePR({"foo": dtime(2)}))
    assert await f(FakePR({"foo": dtime(5)}))
    assert not await f(FakePR({"foo": dtime(18)}))
    assert not await f(FakePR({"foo": dtime(23)}))

    f = filter.BinaryFilter({">=": ("foo", date.utcnow())})
    assert await f(FakePR({"foo": dtime(14)}))
    assert not await f(FakePR({"foo": dtime(2)}))
    assert not await f(FakePR({"foo": dtime(5)}))
    assert await f(FakePR({"foo": dtime(18)}))
    assert await f(FakePR({"foo": dtime(23)}))
Code example #17
File: http.py, Project: CamClrt/mergify-engine
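    # Presumably used as a tenacity wait strategy: honours the Retry-After
    # header, given either as a number of seconds or as an HTTP date, and
    # returns 0 when it cannot be determined.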
    def __call__(self, retry_state: tenacity.RetryCallState) -> float:
        if retry_state.outcome is None:
            return 0

        exc = retry_state.outcome.exception()
        if exc is None or not isinstance(exc, HTTPStatusError):
            return 0

        value = exc.response.headers.get("retry-after")
        if value is None:
            return 0
        elif value.isdigit():
            return int(value)

        d = parse_date(value)
        if d is None:
            return 0
        return max(0, (d - date.utcnow()).total_seconds())
Code example #18
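# The same kind of checks with date.Time values, including timezone-aware
# comparisons against Europe/Paris.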
async def test_time_binary() -> None:
    assert "foo>=00:00" == str(
        filter.BinaryFilter({">=": ("foo", date.Time(0, 0, UTC))})
    )
    assert "foo<=23:59" == str(
        filter.BinaryFilter({"<=": ("foo", date.Time(23, 59, UTC))})
    )
    assert "foo<=03:09" == str(
        filter.BinaryFilter({"<=": ("foo", date.Time(3, 9, UTC))})
    )
    assert "foo<=03:09[Europe/Paris]" == str(
        filter.BinaryFilter(
            {"<=": ("foo", date.Time(3, 9, zoneinfo.ZoneInfo("Europe/Paris")))}
        )
    )

    now = date.utcnow()

    f = filter.BinaryFilter({"<=": ("foo", date.Time(5, 8, UTC))})
    assert await f(FakePR({"foo": now.replace(hour=5, minute=8)}))
    assert await f(FakePR({"foo": now.replace(hour=2, minute=1)}))
    assert await f(FakePR({"foo": now.replace(hour=5, minute=1)}))
    assert not await f(FakePR({"foo": now.replace(hour=6, minute=2)}))
    assert not await f(FakePR({"foo": now.replace(hour=8, minute=9)}))

    f = filter.BinaryFilter({">=": ("foo", date.Time(5, 8, UTC))})
    assert await f(FakePR({"foo": now.replace(hour=5, minute=8)}))
    assert not await f(FakePR({"foo": now.replace(hour=2, minute=1)}))
    assert not await f(FakePR({"foo": now.replace(hour=5, minute=1)}))
    assert await f(FakePR({"foo": now.replace(hour=6, minute=2)}))
    assert await f(FakePR({"foo": now.replace(hour=8, minute=9)}))

    f = filter.BinaryFilter(
        {">=": ("foo", date.Time(5, 8, zoneinfo.ZoneInfo("Europe/Paris")))}
    )
    assert await f(FakePR({"foo": now.replace(hour=4, minute=8)}))
    assert not await f(FakePR({"foo": now.replace(hour=1, minute=1)}))
    assert not await f(FakePR({"foo": now.replace(hour=4, minute=1)}))
    assert await f(FakePR({"foo": now.replace(hour=5, minute=2)}))
    assert await f(FakePR({"foo": now.replace(hour=7, minute=9)}))
Code example #19
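    # Estimates the next datetime at which the comparison between the pull
    # request value and `ref` could change its result, presumably so the
    # condition can be re-evaluated at that moment.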
    def _operator(value: typing.Any, ref: typing.Any) -> datetime.datetime:
        if value is None:
            return date.DT_MAX
        try:
            dt_value = _as_datetime(value).astimezone(datetime.timezone.utc)
            dt_ref = _as_datetime(ref).astimezone(datetime.timezone.utc)

            handle_equality = op in (
                operator.eq,
                operator.ne,
                operator.le,
                operator.ge,
            )
            if handle_equality and dt_value == dt_ref:
                # NOTE(sileht): The condition will change...
                if isinstance(ref, date.PartialDatetime):
                    if isinstance(value, date.DayOfWeek):
                        # next day
                        dt_ref = dt_ref + datetime.timedelta(days=1)
                    elif isinstance(ref, date.Day):
                        # next day
                        dt_ref = dt_ref + datetime.timedelta(days=1)
                    elif isinstance(ref, date.Month):
                        # first day of next month
                        dt_ref = dt_ref.replace(day=1)
                        dt_ref = dt_ref + datetime.timedelta(days=32)
                        dt_ref = dt_ref.replace(day=1)
                    elif isinstance(ref, date.Year):
                        # first day of next year
                        dt_ref = dt_ref.replace(month=1, day=1)
                        dt_ref = dt_ref + datetime.timedelta(days=366)
                        dt_ref = dt_ref.replace(month=1, day=1)
                    return _dt_in_future(
                        dt_ref.replace(hour=0, minute=0, second=0, microsecond=0)
                    )
                elif isinstance(ref, date.RelativeDatetime):
                    return date.utcnow() + datetime.timedelta(minutes=1)
                return _dt_in_future(dt_ref + datetime.timedelta(minutes=1))
            elif isinstance(ref, date.RelativeDatetime):
                return _dt_in_future(dt_value + (date.utcnow() - dt_ref))
            elif dt_value < dt_ref:
                return _dt_in_future(dt_ref)
            else:
                if isinstance(ref, date.Time):
                    # Condition will change next day at 00:00:00
                    dt_ref = dt_ref + datetime.timedelta(days=1)
                elif isinstance(value, date.DayOfWeek):
                    dt_ref = dt_ref + datetime.timedelta(days=7)
                elif isinstance(ref, date.Day):
                    # Condition will change, 1st day of next month at 00:00:00
                    dt_ref = dt_ref.replace(day=1)
                    dt_ref = dt_ref + datetime.timedelta(days=32)
                    if op in (operator.eq, operator.ne):
                        dt_ref = dt_ref.replace(day=ref.value)
                    else:
                        dt_ref = dt_ref.replace(day=1)
                elif isinstance(ref, date.Month):
                    # Condition will change, 1st January of next year at 00:00:00
                    dt_ref = dt_ref.replace(month=1, day=1)
                    dt_ref = dt_ref + datetime.timedelta(days=366)
                    if op in (operator.eq, operator.ne):
                        dt_ref = dt_ref.replace(month=ref.value, day=1)
                    else:
                        dt_ref = dt_ref.replace(month=1, day=1)
                else:
                    return date.DT_MAX
                if op in (operator.eq, operator.ne):
                    return _dt_in_future(dt_ref)
                else:
                    return _dt_in_future(
                        dt_ref.replace(hour=0, minute=0, second=0, microsecond=0)
                    )
        except OverflowError:
            return date.DT_MAX
Code example #20
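# Retry-After given as "1": the retry happens roughly one second later.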
async def test_client_retry_429_retry_after_as_seconds(
    respx_mock: respx.MockRouter,
) -> None:
    now = date.utcnow()
    when = await _do_test_client_retry_429(respx_mock, "1")
    elapsed_seconds = (when - now).total_seconds()
    assert 0.97 < elapsed_seconds <= 1.03
Code example #21
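# Test helper: today's UTC datetime with only the day replaced.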
def dtime(day: int) -> datetime.datetime:
    return date.utcnow().replace(day=day)
Code example #22
File: check_api.py, Project: CamClrt/mergify-engine
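# Creates or updates the check run attached to the pull request head SHA;
# completed check runs cannot be reopened, so a new one is created when the
# status goes back to in_progress.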
async def set_check_run(
    ctxt: "context.Context",
    name: str,
    result: Result,
    external_id: typing.Optional[str] = None,
    skip_cache: bool = False,
) -> github_types.CachedGitHubCheckRun:
    if result.conclusion is Conclusion.PENDING:
        status = Status.IN_PROGRESS
    else:
        status = Status.COMPLETED

    started_at = (result.started_at or date.utcnow()).isoformat()

    post_parameters = GitHubCheckRunParameters({
        "name": name,
        "head_sha": ctxt.pull["head"]["sha"],
        "status": typing.cast(github_types.GitHubCheckRunStatus, status.value),
        "started_at": typing.cast(github_types.ISODateTimeType, started_at),
        "details_url": f"{ctxt.pull['html_url']}/checks",
        "output": {
            "title": result.title,
            "summary": result.summary,
        },
    })

    if result.annotations is not None:
        post_parameters["output"]["annotations"] = result.annotations

    # Maximum output/summary length for Check API is 65535
    summary = post_parameters["output"]["summary"]
    if summary:
        post_parameters["output"]["summary"] = utils.unicode_truncate(
            summary, 65535, "…")

    if external_id:
        post_parameters["external_id"] = external_id

    if status is Status.COMPLETED:
        ended_at = (result.ended_at or date.utcnow()).isoformat()
        post_parameters["conclusion"] = result.conclusion.value
        post_parameters["completed_at"] = typing.cast(
            github_types.ISODateTimeType, ended_at)

    if skip_cache:
        checks = sorted(
            await get_checks_for_ref(
                ctxt,
                ctxt.pull["head"]["sha"],
                check_name=name,
                app_id=config.INTEGRATION_ID,
            ),
            key=lambda c: c["id"],
            reverse=True,
        )
    else:
        checks = sorted(
            (c
             for c in await ctxt.pull_engine_check_runs if c["name"] == name),
            key=lambda c: c["id"],
            reverse=True,
        )

    if len(checks) >= 2:
        ctxt.log.warning(
            "pull requests with duplicate checks",
            checks=checks,
            skip_cache=skip_cache,
            all_checks=await ctxt.pull_engine_check_runs,
            fresh_checks=await get_checks_for_ref(
                ctxt, ctxt.pull["head"]["sha"], app_id=config.INTEGRATION_ID
            ),
        )

    if not checks or (Status(checks[0]["status"]) == Status.COMPLETED
                      and status == Status.IN_PROGRESS):
        # NOTE(sileht): Either this is the first time we see this check, or the
        # previous one has been completed and we now go back to in_progress.
        # Since GitHub doesn't allow changing the status of completed
        # check-runs, we have to create a new one.
        new_check = to_check_run_light(
            typing.cast(
                github_types.GitHubCheckRun,
                (await ctxt.client.post(
                    f"{ctxt.base_url}/check-runs",
                    api_version="antiope",
                    json=post_parameters,
                )).json(),
            ))
    else:
        post_parameters["details_url"] += f"?check_run_id={checks[0]['id']}"

        # Don't do useless update
        if check_need_update(checks[0], post_parameters):
            new_check = to_check_run_light(
                typing.cast(
                    github_types.GitHubCheckRun,
                    (await ctxt.client.patch(
                        f"{ctxt.base_url}/check-runs/{checks[0]['id']}",
                        api_version="antiope",
                        json=post_parameters,
                    )).json(),
                ))
        else:
            new_check = checks[0]

    if not skip_cache:
        await ctxt.update_cached_check_runs(new_check)
    return new_check
Code example #23
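# A datetime already in the past is replaced by DT_MAX, i.e. "never".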
def _dt_in_future(value: datetime.datetime) -> datetime.datetime:
    if value < date.utcnow():
        return date.DT_MAX
    return value
Code example #24
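# date.Time compares against timezone-aware datetimes and against other
# date.Time values, honouring the timezone of each side.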
def test_time_compare():
    utc = datetime.timezone.utc
    with freeze_time("2021-09-22T08:00:05", tz_offset=0):
        assert datetime.datetime(2021, 9, 22, 8, 0, 5,
                                 tzinfo=utc) >= date.Time(8, 0, utc)

    with freeze_time("2012-01-14T12:15:00", tz_offset=0):
        assert date.Time(12, 0, utc) < date.utcnow()
        assert date.Time(15, 45, utc) > date.utcnow()
        assert date.Time(12, 15, utc) == date.utcnow()
        assert date.utcnow() > date.Time(12, 0, utc)
        assert date.utcnow() < date.Time(15, 45, utc)
        assert date.utcnow() == date.Time(12, 15, utc)
        assert date.Time(13, 15, utc) == date.Time(13, 15, utc)
        assert date.Time(13, 15, utc) < date.Time(15, 15, utc)
        assert date.Time(15, 0, utc) > date.Time(5, 0, utc)

        # TZ that ends up on the same day
        zone = zoneinfo.ZoneInfo("Europe/Paris")
        assert date.Time(10, 0, zone) < date.utcnow()
        assert date.Time(18, 45, zone) > date.utcnow()
        assert date.Time(13, 15, zone) == date.utcnow()
        assert date.utcnow() > date.Time(10, 0, zone)
        assert date.utcnow() < date.Time(18, 45, zone)
        assert date.utcnow() == date.Time(13, 15, zone)
        assert date.Time(13, 15, zone) == date.Time(13, 15, zone)
        assert date.Time(13, 15, zone) < date.Time(15, 15, zone)
        assert date.Time(15, 0, zone) > date.Time(5, 0, zone)

        # TZ that ends up on the next day (GMT+13)
        zone = zoneinfo.ZoneInfo("Pacific/Auckland")
        assert date.Time(0, 2, zone) < date.utcnow()
        assert date.Time(2, 9, zone) > date.utcnow()
        assert date.Time(1, 15, zone) == date.utcnow()
        assert date.utcnow() > date.Time(0, 2, zone)
        assert date.utcnow() < date.Time(2, 9, zone)
        assert date.utcnow() == date.Time(1, 15, zone)
        assert date.Time(13, 15, zone) == date.Time(13, 15, zone)
        assert date.Time(13, 15, zone) < date.Time(15, 15, zone)
        assert date.Time(15, 0, zone) > date.Time(5, 0, zone)

        assert date.utcnow() == date.utcnow()
        assert (date.utcnow() > date.utcnow()) is False