def check_variable_data(data_var: xr.DataArray, log: structlog.BoundLogger = LOGGER) -> None:
    for dim in data_var.dims:
        if dim not in CDM_COORDS:
            log.warning(f"unknown dimension '{dim}'")
        elif dim not in data_var.coords:
            log.error(f"dimension with no associated coordinate '{dim}'")
async def update_pr_immediately_if_configured(
    m_res: MergeabilityResponse,
    event: EventInfoResponse,
    pull_request: PR,
    log: structlog.BoundLogger,
) -> None:
    if (
        m_res == MergeabilityResponse.NEEDS_UPDATE
        and isinstance(event.config, V1)
        and event.config.merge.update_branch_immediately
    ):
        log.info("updating pull request")
        if not await update_pr_with_retry(pull_request):
            log.error("failed to update branch")
            await pull_request.set_status(summary="🛑 could not update branch")
def check_coordinate_data(
    coord_name: T.Hashable,
    coord: xr.DataArray,
    increasing: bool = True,
    log: structlog.BoundLogger = LOGGER,
) -> None:
    diffs = coord.diff(coord_name).values
    zero = 0
    if coord.dtype.name in TIME_DTYPE_NAMES:
        zero = np.timedelta64(0, "ns")
    if increasing:
        if (diffs <= zero).any():
            log.error("coordinate stored direction is not 'increasing'")
    else:
        if (diffs >= zero).any():
            log.error("coordinate stored direction is not 'decreasing'")
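# Hypothetical usage sketch (not part of the original module): run the
# monotonicity check above on a dataset's "time" coordinate. The file path and
# coordinate name are assumptions for illustration only.
def _example_check_time_coordinate(path: str = "observations.nc") -> None:
    import xarray as xr  # assumed available, as in the checks above

    ds = xr.open_dataset(path)
    # "time" must be stored in increasing order for this check to pass
    check_coordinate_data("time", ds.coords["time"], increasing=True)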
def check_dataset_data_vars(
    dataset_data_vars: T.Mapping[T.Hashable, xr.DataArray],
    log: structlog.BoundLogger = LOGGER,
) -> T.Tuple[T.Dict[str, xr.DataArray], T.Dict[str, xr.DataArray]]:
    data_vars = sanitise_mapping(dataset_data_vars, log=log)
    payload_vars = {}
    ancillary_vars = {}
    for name, var in data_vars.items():
        if name in CDM_ANCILLARY_VARS:
            ancillary_vars[name] = var
        else:
            payload_vars[name] = var
    if len(payload_vars) > 1:
        log.error(
            "file must have at most one non-auxiliary variable",
            data_vars=list(payload_vars),
        )
    for data_var_name, data_var in payload_vars.items():
        log = log.bind(data_var_name=data_var_name)
        check_variable(data_var_name, data_var, CDM_DATA_VARS, log=log)
    return payload_vars, ancillary_vars
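# Hypothetical usage sketch (not in the original module): given an xarray
# Dataset opened from a CDM-style NetCDF file, split its variables into the
# single payload variable and any ancillary variables. The file path is an
# assumption made for illustration only.
def _example_split_dataset_vars(path: str = "observations.nc") -> None:
    import xarray as xr  # assumed available, as in the checks above

    ds = xr.open_dataset(path)
    payload_vars, ancillary_vars = check_dataset_data_vars(ds.data_vars)
    print("payload:", list(payload_vars), "ancillary:", list(ancillary_vars))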
async def process_webhook_event(
    connection: RedisConnection,
    webhook_queue: RedisWebhookQueue,
    queue_name: str,
    log: structlog.BoundLogger,
) -> None:
    log.info("block for new webhook event")
    webhook_event_json: BlockingZPopReply = await connection.bzpopmin([queue_name])
    webhook_event = WebhookEvent.parse_raw(webhook_event_json.value)
    async with Client(
        owner=webhook_event.repo_owner,
        repo=webhook_event.repo_name,
        installation_id=webhook_event.installation_id,
    ) as api_client:
        pull_request = PR(
            owner=webhook_event.repo_owner,
            repo=webhook_event.repo_name,
            number=webhook_event.pull_request_number,
            installation_id=webhook_event.installation_id,
            client=api_client,
        )
        is_merging = (
            await connection.get(webhook_event.get_merge_target_queue_name())
            == webhook_event.json()
        )
        # trigger status updates
        m_res, event = await pull_request.mergeability()
        if event is None or m_res == MergeabilityResponse.NOT_MERGEABLE:
            # remove ineligible events from the merge queue
            await connection.zrem(
                webhook_event.get_merge_queue_name(), [webhook_event.json()]
            )
            return
        if m_res == MergeabilityResponse.SKIPPABLE_CHECKS:
            log.info("skippable checks")
            return

        await update_pr_immediately_if_configured(m_res, event, pull_request, log)

        if m_res not in (
            MergeabilityResponse.NEEDS_UPDATE,
            MergeabilityResponse.NEED_REFRESH,
            MergeabilityResponse.WAIT,
            MergeabilityResponse.OK,
            MergeabilityResponse.SKIPPABLE_CHECKS,
        ):
            raise Exception("Unknown MergeabilityResponse")

        if isinstance(event.config, V1) and event.config.merge.do_not_merge:
            # we duplicate the status messages found in the mergeability
            # function here because status messages for WAIT and NEEDS_UPDATE
            # are only set when Kodiak hits the merging logic.
            if m_res == MergeabilityResponse.WAIT:
                await pull_request.set_status(summary="⌛️ waiting for checks")
            if m_res in {
                MergeabilityResponse.OK,
                MergeabilityResponse.SKIPPABLE_CHECKS,
            }:
                await pull_request.set_status(summary="✅ okay to merge")
            log.debug(
                "skipping merging for PR because `merge.do_not_merge` is configured."
            )
            return

        if (
            isinstance(event.config, V1)
            and event.config.merge.prioritize_ready_to_merge
            and m_res == MergeabilityResponse.OK
        ):
            merge_success = await pull_request.merge(event)
            if merge_success:
                return
            log.error("problem merging PR")

        # don't clobber statuses set in the merge loop
        # The following responses are okay to add to merge queue:
        # + NEEDS_UPDATE - okay for merging
        # + NEED_REFRESH - assume okay
        # + WAIT - assume checks pass
        # + OK - we've got the green
        webhook_event_jsons = await webhook_queue.enqueue_for_repo(event=webhook_event)
        if is_merging:
            return

        position = find_position(webhook_event_jsons, webhook_event_json.value)
        if position is None:
            return
        # use 1-based indexing
        humanized_position = inflection.ordinalize(position + 1)
        await pull_request.set_status(
            f"📦 enqueued for merge (position={humanized_position})"
        )
async def process_repo_queue(
    log: structlog.BoundLogger, connection: RedisConnection, queue_name: str
) -> None:
    log.info("block for new repo event")
    webhook_event_json: BlockingZPopReply = await connection.bzpopmin([queue_name])
    webhook_event = WebhookEvent.parse_raw(webhook_event_json.value)
    # mark this PR as being merged currently. we check this elsewhere to set
    # proper status codes
    await connection.set(
        webhook_event.get_merge_target_queue_name(), webhook_event.json()
    )
    async with Client(
        owner=webhook_event.repo_owner,
        repo=webhook_event.repo_name,
        installation_id=webhook_event.installation_id,
    ) as api_client:
        pull_request = PR(
            owner=webhook_event.repo_owner,
            repo=webhook_event.repo_name,
            number=webhook_event.pull_request_number,
            installation_id=webhook_event.installation_id,
            client=api_client,
        )
        # mark that we're working on this PR
        await pull_request.set_status(summary="⛴ attempting to merge PR")
        skippable_check_timeout = 4
        while True:
            # there are two exits to this loop:
            # - OK MergeabilityResponse
            # - NOT_MERGEABLE MergeabilityResponse
            #
            # otherwise we continue to poll the Github API for a status change
            # from the other states: NEEDS_UPDATE, NEED_REFRESH, WAIT
            # TODO(chdsbd): Replace enum response with exceptions
            m_res, event = await pull_request.mergeability(merging=True)
            log = log.bind(res=m_res)
            if event is None or m_res == MergeabilityResponse.NOT_MERGEABLE:
                log.info("cannot merge")
                break
            if m_res == MergeabilityResponse.SKIPPABLE_CHECKS:
                if skippable_check_timeout:
                    skippable_check_timeout -= 1
                    await asyncio.sleep(RETRY_RATE_SECONDS)
                    continue
                await pull_request.set_status(
                    summary="⌛️ waiting a bit for skippable checks"
                )
                break
            if m_res == MergeabilityResponse.NEEDS_UPDATE:
                log.info("update pull request and don't attempt to merge")
                if await update_pr_with_retry(pull_request):
                    continue
                log.error("failed to update branch")
                await pull_request.set_status(summary="🛑 could not update branch")
                # break to find next PR to try and merge
                break
            elif m_res == MergeabilityResponse.NEED_REFRESH:
                # trigger a git mergeability check on Github's end and poll for result
                log.info("needs refresh")
                await pull_request.trigger_mergeability_check()
                continue
            elif m_res == MergeabilityResponse.WAIT:
                # continuously poll until we either get an OK or a failure for mergeability
                log.info("waiting for status checks")
                continue
            elif m_res == MergeabilityResponse.OK:
                # continue to try and merge
                pass
            else:
                raise Exception("Unknown MergeabilityResponse")

            retries = 5
            while retries:
                log.info("merge")
                if await pull_request.merge(event):
                    # success merging
                    break
                retries -= 1
                log.info("retry merge")
                await asyncio.sleep(RETRY_RATE_SECONDS)
            else:
                log.error("Exhausted attempts to merge pull request")