Example No. 1
def get_merge_method(
    cfg_merge_method: Optional[MergeMethod],
    valid_merge_methods: List[MergeMethod],
    log: structlog.BoundLogger,
) -> MergeMethod:
    if cfg_merge_method is not None:
        return cfg_merge_method

    # take the first valid merge method.
    for merge_method in MERGE_METHODS:
        if merge_method in valid_merge_methods:
            return merge_method

    # NOTE(chdsbd): I don't think the following code should be reachable in
    # production, but I don't want to blow things up with an assert.
    log.warning(
        "no merge methods selected.",
        cfg_merge_method=cfg_merge_method,
        valid_merge_methods=valid_merge_methods,
    )
    return MergeMethod.merge
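
A minimal usage sketch follows, assuming MergeMethod is the project's enum of GitHub merge methods (merge, squash, rebase) and MERGE_METHODS is its ordered preference list; the concrete values are illustrative, not taken from a real configuration.

import structlog

log = structlog.get_logger()

# an explicitly configured method always wins
assert (
    get_merge_method(MergeMethod.squash, [MergeMethod.merge, MergeMethod.squash], log)
    == MergeMethod.squash
)

# with no configured method, the first entry of MERGE_METHODS that the
# repository allows is returned
assert get_merge_method(None, [MergeMethod.rebase], log) == MergeMethod.rebase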
Example No. 2
def check_variable(
    data_var_name: str,
    data_var: xr.DataArray,
    definitions: T.Dict[str, T.Dict[str, str]],
    log: structlog.BoundLogger = LOGGER,
) -> None:
    attrs = sanitise_mapping(data_var.attrs, log)
    if data_var_name in definitions:
        definition = definitions[data_var_name]
    else:
        log.warning("unexpected name for variable")
        definition = guess_definition(attrs, definitions, log)
    check_variable_attrs(data_var.attrs,
                         definition,
                         dtype=data_var.dtype.name,
                         log=log)
    check_variable_data(data_var, log=log)
    if data_var.dims == (data_var_name, ):
        increasing = definition.get("stored_direction",
                                    "increasing") == "increasing"
        check_coordinate_data(data_var_name, data_var, increasing, log)
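
A hedged sketch of calling check_variable on a one-dimensional coordinate; the definitions mapping below is illustrative and not the project's real CDM table, and the helper functions are assumed to come from the surrounding module.

import numpy as np
import xarray as xr

definitions = {
    "lat": {"units": "degrees_north", "stored_direction": "increasing"},
}
lat = xr.DataArray(
    np.arange(-90.0, 91.0, 30.0),
    dims=("lat",),
    attrs={"long_name": "latitude", "units": "degrees_north"},
)
# any problems are reported as warnings through the default structlog LOGGER
check_variable("lat", lat, definitions)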
Example No. 3
def get_merge_method(
    cfg_merge_method: Optional[MergeMethod],
    valid_merge_methods: List[MergeMethod],
    labels: List[str],
    log: structlog.BoundLogger,
) -> MergeMethod:

    # parse merge.method override label
    # example: `kodiak: merge.method = "rebase"`
    for label in labels:
        if not label.startswith("kodiak:"):
            continue
        # an existing label such as "kodiak:disabled" will not parse as TOML
        # here and is simply ignored.
        _start, _sep, maybe_config = label.partition("kodiak:")
        try:
            merge_method_override = MergeMethodOverride.parse_obj(
                toml.loads(maybe_config)
            )
        except (toml.TomlDecodeError, pydantic.ValidationError):
            continue
        return merge_method_override.merge.method

    if cfg_merge_method is not None:
        return cfg_merge_method

    # take the first valid merge method.
    for merge_method in MERGE_METHODS:
        if merge_method in valid_merge_methods:
            return merge_method

    # NOTE(chdsbd): I don't think the following code should be reachable in
    # production, but I don't want to blow things up with an assert.
    log.warning(
        "no merge methods selected.",
        cfg_merge_method=cfg_merge_method,
        valid_merge_methods=valid_merge_methods,
    )
    return MergeMethod.merge
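
The sketch below exercises the label-override path using the kodiak: merge.method = "rebase" format shown in the comment above; the enum members are assumed to be merge, squash and rebase.

import structlog

labels = ['kodiak: merge.method = "rebase"', "automerge"]
method = get_merge_method(
    cfg_merge_method=MergeMethod.squash,
    valid_merge_methods=[MergeMethod.merge, MergeMethod.squash, MergeMethod.rebase],
    labels=labels,
    log=structlog.get_logger(),
)
# the label override takes precedence over the configured method,
# so method == MergeMethod.rebase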
Example No. 4
def check_dataset_attrs(dataset_attrs: T.Mapping[T.Hashable, T.Any],
                        log: structlog.BoundLogger = LOGGER) -> None:
    attrs = sanitise_mapping(dataset_attrs, log)
    conventions = attrs.get("Conventions")
    if conventions is None:
        log.warning("missing required 'Conventions' global attribute")
    elif conventions not in {"CF-1.8", "CF-1.7", "CF-1.6"}:
        log.warning("invalid 'Conventions' value", conventions=conventions)

    for attr_name in CDM_ATTRS:
        if attr_name not in attrs:
            log.warning(f"missing recommended global attribute '{attr_name}'")
Example No. 5
def check_variable_attrs(
    variable_attrs: T.Mapping[T.Hashable, T.Any],
    definition: T.Dict[str, str],
    dtype: T.Optional[str] = None,
    log: structlog.BoundLogger = LOGGER,
) -> None:
    attrs = sanitise_mapping(variable_attrs, log)

    if "long_name" not in attrs:
        log.warning("missing recommended attribute 'long_name'")
    if "units" not in attrs:
        if dtype not in TIME_DTYPE_NAMES:
            log.warning("missing recommended attribute 'units'")
    else:
        units = attrs.get("units")
        expected_units = definition.get("units")
        if expected_units is not None:
            log = log.bind(expected_units=expected_units)
            cf_units = cfunits.Units(units)
            if not cf_units.isvalid:
                log.warning("'units' attribute not valid", units=units)
            else:
                expected_cf_units = cfunits.Units(expected_units)
                log = log.bind(units=units, expected_units=expected_units)
                if not cf_units.equivalent(expected_cf_units):
                    log.warning(
                        "'units' attribute not equivalent to the expected")
                elif not cf_units.equals(expected_cf_units):
                    log.warning("'units' attribute not equal to the expected")

    standard_name = attrs.get("standard_name")
    expected_standard_name = definition.get("standard_name")
    if expected_standard_name is not None:
        log = log.bind(expected_standard_name=expected_standard_name)
        if standard_name is None:
            log.warning("missing expected attribute 'standard_name'")
        elif standard_name != expected_standard_name:
            log.warning("'standard_name' attribute not valid",
                        standard_name=standard_name)
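
A hedged sketch of the units comparison: under cfunits semantics, degC and K are convertible (equivalent) but not identical (equal), so the call below should log the "not equal to the expected" warning. The definition dict is illustrative, not a real CDM entry.

definition = {"units": "K", "standard_name": "air_temperature"}
attrs = {
    "long_name": "near-surface temperature",
    "units": "degC",
    "standard_name": "air_temperature",
}
check_variable_attrs(attrs, definition, dtype="float32")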
Example No. 6
async def evaluate_pr(
    install: str,
    owner: str,
    repo: str,
    number: int,
    merging: bool,
    dequeue_callback: Callable[[], Awaitable[None]],
    requeue_callback: Callable[[], Awaitable[None]],
    queue_for_merge_callback: QueueForMergeCallback,
    is_active_merging: bool,
    log: structlog.BoundLogger,
) -> None:
    skippable_check_timeout = 4
    api_call_retries_remaining = 5
    api_call_errors: List[APICallError] = []
    log = log.bind(owner=owner, repo=repo, number=number, merging=merging)
    while True:
        log.info("get_pr")
        try:
            pr = await asyncio.wait_for(
                get_pr(
                    install=install,
                    owner=owner,
                    repo=repo,
                    number=number,
                    dequeue_callback=dequeue_callback,
                    requeue_callback=requeue_callback,
                    queue_for_merge_callback=queue_for_merge_callback,
                ),
                timeout=60,
            )
            if pr is None:
                log.info("failed to get_pr")
                return
            try:
                await asyncio.wait_for(
                    mergeable(
                        api=pr,
                        subscription=pr.event.subscription,
                        config=pr.event.config,
                        config_str=pr.event.config_str,
                        config_path=pr.event.config_file_expression,
                        app_id=conf.GITHUB_APP_ID,
                        repository=pr.event.repository,
                        pull_request=pr.event.pull_request,
                        branch_protection=pr.event.branch_protection,
                        review_requests=pr.event.review_requests,
                        bot_reviews=pr.event.bot_reviews,
                        contexts=pr.event.status_contexts,
                        check_runs=pr.event.check_runs,
                        commits=pr.event.commits,
                        valid_merge_methods=pr.event.valid_merge_methods,
                        merging=merging,
                        is_active_merge=is_active_merging,
                        skippable_check_timeout=skippable_check_timeout,
                        api_call_errors=api_call_errors,
                        api_call_retries_remaining=api_call_retries_remaining,
                    ),
                    timeout=60,
                )
                log.info("evaluate_pr successful")
            except RetryForSkippableChecks:
                if skippable_check_timeout > 0:
                    skippable_check_timeout -= 1
                    log.info("waiting for skippable checks to pass")
                    await asyncio.sleep(RETRY_RATE_SECONDS)
                    continue
            except PollForever:
                log.info("polling")
                await asyncio.sleep(POLL_RATE_SECONDS)
                continue
            except ApiCallException as e:
                # an API exception is most likely a transient error that can
                # be resolved by retrying the call to GitHub.
                if api_call_retries_remaining:
                    api_call_errors.append(
                        APICallError(
                            api_name=e.method,
                            http_status=str(e.status_code),
                            response_body=str(e.response),
                        )
                    )
                    api_call_retries_remaining -= 1
                    log.info("problem contacting remote api. retrying")
                    continue
                log.warning("api_call_retries_remaining", exc_info=True)
            return
        except asyncio.TimeoutError:
            # On timeout we add the PR to the back of the queue to try again.
            log.warning("mergeable_timeout", exc_info=True)
            await requeue_callback()
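
The no-op callbacks below give a hedged sketch of how a worker might drive evaluate_pr; the queue_for_merge stub's signature is only a guess at the QueueForMergeCallback protocol, and a real run would still need Kodiak's get_pr, configuration, and GitHub credentials.

import asyncio
from typing import Optional

import structlog

async def noop() -> None:
    return None

async def queue_for_merge(*args: object, **kwargs: object) -> Optional[int]:
    # placeholder: the real callback reports the PR's position in the merge queue
    return 0

asyncio.run(
    evaluate_pr(
        install="1234",
        owner="acme",
        repo="widgets",
        number=42,
        merging=False,
        dequeue_callback=noop,
        requeue_callback=noop,
        queue_for_merge_callback=queue_for_merge,
        is_active_merging=False,
        log=structlog.get_logger(),
    )
)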