Example #1
    async def _extract_pulls_from_stream(self, stream_name, installation):
        LOG.debug("read stream", stream_name=stream_name)
        messages = await self.redis.xrange(stream_name,
                                           count=config.STREAM_MAX_BATCH)
        statsd.histogram("engine.streams.size", len(messages))

        # Groups stream by pull request
        pulls = collections.OrderedDict()
        for message_id, message in messages:
            data = msgpack.unpackb(message[b"event"], raw=False)
            owner = data["owner"]
            repo = data["repo"]
            source = data["source"]
            if data["pull_number"] is not None:
                key = (owner, repo, data["pull_number"])
                group = pulls.setdefault(key, ([], []))
                group[0].append(message_id)
                group[1].append(source)
            else:
                logger = logs.getLogger(__name__, gh_repo=repo, gh_owner=owner)
                try:
                    messages.extend(await self._convert_event_to_messages(
                        stream_name, installation, owner, repo, source))
                except IgnoredException:
                    logger.debug("ignored error", exc_info=True)
                except StreamRetry:
                    raise
                except Exception:
                    # Ignore it; it will be retried later
                    logger.error("failed to process incomplete event",
                                 exc_info=True)
                    continue

                await self.redis.xdel(stream_name, message_id)
        return pulls
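The ordered mapping returned here is what _consume_pulls (Example #4) iterates over. A minimal sketch of its shape, with hypothetical owner, repository, message ids and payloads that are not taken from the source:

import collections

pulls = collections.OrderedDict()
pulls[("some-owner", "some-repo", 1234)] = (
    [b"1589000000000-0", b"1589000000001-0"],  # Redis stream message ids, deleted with XDEL once consumed
    [{"event_type": "pull_request"}, {"event_type": "push"}],  # unpacked event payloads ("sources")
)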
Example #2
def run_engine(installation, owner, repo, pull_number, sources):
    logger = logs.getLogger(__name__,
                            gh_repo=repo,
                            gh_owner=owner,
                            gh_pull=pull_number)
    logger.debug("engine in thread start")
    try:
        sync_redis = utils.get_redis_for_cache()
        subscription = sub_utils.get_subscription(sync_redis,
                                                  installation["id"])
        logger.debug("engine get installation")
        with github.get_client(owner, repo, installation) as client:
            try:
                pull = client.item(f"pulls/{pull_number}")
            except httpx.HTTPClientSideError as e:
                if e.status_code == 404:
                    logger.debug("pull request doesn't exists, skipping it")
                    return
                raise

            if (pull["base"]["repo"]["private"]
                    and not subscription["subscription_active"]):
                logger.debug(
                    "pull request on private private repository without subscription, skipping it"
                )
                return

            engine.run(client, pull, sources)
    finally:
        logger.debug("engine in thread end")
Example #3
def get_pull_logger(pull):
    return logs.getLogger(
        __name__,
        gh_owner=(pull["base"]["user"]["login"]
                  if "base" in pull else "<unknown-yet>"),
        gh_repo=(pull["base"]["repo"]["name"]
                 if "base" in pull else "<unknown-yet>"),
        gh_private=(pull["base"]["repo"]["private"]
                    if "base" in pull else "<unknown-yet>"),
        gh_branch=pull["base"]["ref"] if "base" in pull else "<unknown-yet>",
        gh_pull=pull["number"],
        gh_pull_url=pull.get("html_url", "<unknown-yet>"),
        gh_pull_state=("merged" if pull.get("merged") else
                       (pull.get("mergeable_state", "unknown") or "none")),
    )
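A hypothetical call, not from the source, showing how the "<unknown-yet>" fallbacks behave when only the pull request number is available yet:

logger = get_pull_logger({"number": 42})
# gh_pull is bound to 42; gh_owner, gh_repo, gh_private, gh_branch and gh_pull_url
# all fall back to "<unknown-yet>", and gh_pull_state falls back to "unknown".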
Example #4
    async def _consume_pulls(self, stream_name, installation, pulls):
        LOG.debug("stream contains %d pulls",
                  len(pulls),
                  stream_name=stream_name)
        for (owner, repo, pull_number), (message_ids,
                                         sources) in pulls.items():
            statsd.histogram("engine.streams.batch-size", len(sources))
            logger = logs.getLogger(__name__,
                                    gh_repo=repo,
                                    gh_owner=owner,
                                    gh_pull=pull_number)

            try:
                logger.debug("engine start with %s sources", len(sources))
                start = time.monotonic()
                await self._run_engine_and_translate_exception_to_retries(
                    installation, owner, repo, pull_number, sources)
                await self.redis.execute_command("XDEL", stream_name,
                                                 *message_ids)
                end = time.monotonic()
                logger.debug("engine finished in %s sec", end - start)
            except IgnoredException:
                await self.redis.execute_command("XDEL", stream_name,
                                                 *message_ids)
                logger.debug("failed to process pull request, ignoring",
                             exc_info=True)
            except MaxPullRetry as e:
                await self.redis.execute_command("XDEL", stream_name,
                                                 *message_ids)
                logger.error(
                    "failed to process pull request, abandoning",
                    attempts=e.attempts,
                    exc_info=True,
                )
            except PullRetry as e:
                logger.info(
                    "failed to process pull request, retrying",
                    attempts=e.attempts,
                    exc_info=True,
                )
            except StreamRetry:
                raise
            except Exception:
                # Ignore it; it will be retried later
                logger.error("failed to process pull request", exc_info=True)
Example #5
import re

from datadog import statsd
import httpx
import voluptuous

from mergify_engine import actions
from mergify_engine import config
from mergify_engine import logs


LOG = logs.getLogger(__name__)

COMMAND_MATCHER = re.compile(r"@Mergify(?:|io) (\w*)(.*)", re.IGNORECASE)
COMMAND_RESULT_MATCHER = re.compile(r"\*Command `([^`]*)`: (pending|success|failure)\*")

UNKNOWN_COMMAND_MESSAGE = "Sorry but I didn't understand the command."
WRONG_ACCOUNT_MESSAGE = "_Hey, I reacted but my real name is @Mergifyio_"


def load_action(message):
    """Load an action from a message.

    :return: A tuple with 3 values: the command name, the commands args and the action."""
    action_classes = actions.get_commands()
    match = COMMAND_MATCHER.search(message)
    if match and match[1] in action_classes:
        # The original body is truncated here in this snippet; the lines below
        # are only a sketch of the documented 3-tuple return value, with the
        # bare action class standing in for the fully parsed action.
        command = match[1]
        command_args = match[2].strip()
        return command, command_args, action_classes[command]
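A quick, hypothetical check (the comment text is made up, not from the source) of how COMMAND_MATCHER splits a comment into a command name and its raw arguments:

match = COMMAND_MATCHER.search("@Mergifyio rebase main")
assert match[1] == "rebase"  # command name, checked against actions.get_commands()
assert match[2] == " main"   # raw argument string, note the leading space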
Example #6
    def log(self):
        return logs.getLogger(
            __name__, gh_owner=self.owner, gh_repo=self.repo, gh_branch=self.ref
        )
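Example #6 is a method shown outside its class; a minimal, hypothetical context (the class name and attribute values are assumptions, not from the source) illustrating the owner, repo and ref attributes it expects on self:

import dataclasses

from mergify_engine import logs


@dataclasses.dataclass
class BranchContext:
    owner: str
    repo: str
    ref: str

    def log(self):
        return logs.getLogger(
            __name__, gh_owner=self.owner, gh_repo=self.repo, gh_branch=self.ref
        )


BranchContext("some-owner", "some-repo", "main").log().debug("branch context bound")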