Example #1
def _in_memory_publish(topic, msg):
    """ Puts the message into the in memory work queue. """
    # Increment the message ID.
    global _in_memory_msg_id
    _in_memory_msg_id += 1

    config = conf.messaging_backends['in_memory']

    # Create a fake fedmsg from the message so we can reuse the
    # BaseEvent.from_fedmsg code to get the appropriate BaseEvent class
    # instance.
    wrapped_msg = BaseEvent.from_fedmsg(
        config['SERVICE'] + "." + topic,
        {"msg_id": str(_in_memory_msg_id), "msg": msg},
    )

    # Put the message into the work queue.
    from freshmaker.consumer import work_queue_put
    try:
        work_queue_put(wrapped_msg)
    except ValueError as e:
        log.warning("No FreshmakerConsumer found.  Shutting down?  %r" % e)
    except AttributeError:
        # In the event that `moksha.hub._hub` hasn't yet been initialized, we
        # need to store messages on the side until it becomes available.
        # As a last-ditch effort, keep the initial messages in a module-level
        # list until the hub comes up.
        log.warning("Hub not initialized.  Queueing on the side.")
        _initial_messages.append(wrapped_msg)
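The helper above depends on module-level state (_in_memory_msg_id, _initial_messages) and on freshmaker's own imports, so it is not runnable on its own. The following self-contained sketch shows the same publish-with-fallback pattern using only the standard library; every name in it (fake_publish, start_consumer, _pending) is illustrative and not part of the freshmaker API.

import itertools
import queue

_msg_ids = itertools.count(1)   # stands in for _in_memory_msg_id
_pending = []                   # side buffer, like _initial_messages above
_work_queue = None              # becomes a real queue once the consumer is up

def fake_publish(topic, msg):
    # Wrap the payload the same way as above: an increasing msg_id plus the body.
    wrapped = {"msg_id": str(next(_msg_ids)), "topic": topic, "msg": msg}
    if _work_queue is None:
        # No consumer yet: park the message on the side, as the except branch above does.
        _pending.append(wrapped)
    else:
        _work_queue.put(wrapped)

def start_consumer():
    # Drain anything that was buffered before the queue existed.
    global _work_queue
    _work_queue = queue.Queue()
    while _pending:
        _work_queue.put(_pending.pop(0))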
Example #2
    def _fake_build_container(self, source_url, build_target, build_opts):
        """
        Fake KojiSession.buildContainer method used in dry run mode.

        Logs the arguments and emits BrewContainerTaskStateChangeEvent of
        CLOSED state.

        :rtype: int
        :return: Fake task_id.
        """
        log.info("DRY RUN: Calling fake buildContainer with args: %r",
                 (source_url, build_target, build_opts))

        # Allocate a unique fake task_id by decrementing the class-level
        # counter.
        KojiService._FAKE_TASK_ID -= 1
        task_id = KojiService._FAKE_TASK_ID

        # Parse the source_url to get the container name (the last path
        # component before the '#commit' fragment) and generate a fake event.
        m = re.match(r".*/(?P<container>[^#]*)", source_url)
        container = m.group('container')
        event = BrewContainerTaskStateChangeEvent("fake_koji_msg_%d" % task_id,
                                                  container,
                                                  build_opts["git_branch"],
                                                  build_target, task_id,
                                                  "BUILDING", "CLOSED")
        event.dry_run = self.dry_run

        # Inject the fake event.
        log.info("DRY RUN: Injecting fake event: %r", event)
        work_queue_put(event)

        return task_id
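Two details of the method above can be exercised in isolation: the class-level counter that hands out unique fake task ids, and the regular expression that extracts the container name from the dist-git URL. The sketch below reproduces both; FakeKoji, the starting counter value and the example URL are assumptions for illustration, only the regex is taken verbatim from the code above.

import re

class FakeKoji:
    # Assumed starting value; decrementing keeps fake ids negative so they
    # cannot collide with real Koji task ids.
    _FAKE_TASK_ID = 0

    @classmethod
    def next_task_id(cls):
        cls._FAKE_TASK_ID -= 1
        return cls._FAKE_TASK_ID

# The container name is the last path component before the '#commit' fragment.
source_url = "git://pkgs.example.com/containers/httpd#abc123"
container = re.match(r".*/(?P<container>[^#]*)", source_url).group("container")

print(container)                 # -> httpd
print(FakeKoji.next_task_id())   # -> -1
print(FakeKoji.next_task_id())   # -> -2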
Example #3
    def _fake_odcs_new_compose(self,
                               compose_source,
                               tag,
                               packages=None,
                               results=None,
                               builds=None,
                               arches=None):
        """
        Fake odcs.new_compose(...) method used in dry run mode.

        Logs the arguments and emits a fake ODCSComposeStateChangeEvent.

        :rtype: dict
        :return: Fake odcs.new_compose dict.
        """
        self.handler.log_info(
            "DRY RUN: Calling fake odcs.new_compose with args: %r",
            (compose_source, tag, packages, results, arches))

        # Base the fake compose id on the lowest compose id already stored in
        # the database, so the fake ids stay negative, unique and keep
        # decreasing even across Freshmaker restarts.
        fake_compose_id = Compose.get_lowest_compose_id(db.session) - 1
        if fake_compose_id >= 0:
            fake_compose_id = -1

        new_compose = {
            'id': fake_compose_id,
            'result_repofile':
            "http://localhost/{}.repo".format(fake_compose_id),
            'state': COMPOSE_STATES['done'],
            'results': results or ['boot.iso']
        }
        if builds:
            new_compose['builds'] = builds
        if arches:
            new_compose['arches'] = arches

        # Generate and inject the ODCSComposeStateChangeEvent event.
        event = ODCSComposeStateChangeEvent("fake_compose_msg", new_compose)
        event.dry_run = True
        self.handler.log_info("Injecting fake event: %r", event)
        work_queue_put(event)

        return new_compose
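The id handling above boils down to: take the lowest compose id already stored, go one below it, and clamp to -1 so the fake id is always negative. Below is a minimal sketch of just that logic, with a plain argument standing in for Compose.get_lowest_compose_id(db.session) and a string standing in for the COMPOSE_STATES['done'] constant.

def make_fake_compose(lowest_stored_id, results=None):
    # One below the lowest stored id, and never zero or positive.
    fake_id = lowest_stored_id - 1
    if fake_id >= 0:
        fake_id = -1
    return {
        "id": fake_id,
        "result_repofile": "http://localhost/{}.repo".format(fake_id),
        "state": "done",   # the real code stores ODCS's COMPOSE_STATES['done'] value
        "results": results or ["boot.iso"],
    }

print(make_fake_compose(1)["id"])    # no fake composes stored yet -> -1
print(make_fake_compose(-4)["id"])   # lowest stored id is a fake -4 -> -5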
Example #4
    def check_unfinished_koji_tasks(self, session):
        """
        Checks the Koji tasks of builds in events which are still BUILDING
        and injects a BrewContainerTaskStateChangeEvent for every task which
        has already finished.
        """
        # Only look at events created within the last week.
        stale_date = datetime.utcnow() - timedelta(days=7)
        db_events = session.query(models.Event).filter(
            models.Event.state == EventState.BUILDING.value,
            models.Event.time_created >= stale_date).all()

        for db_event in db_events:
            for build in db_event.builds:
                # Only builds which are still in the BUILD state need checking.
                if build.state != ArtifactBuildState.BUILD.value:
                    continue
                # Builds without a real (positive) Koji task id (e.g. the
                # negative fake ids from dry run mode) cannot be checked.
                if build.build_id <= 0:
                    continue
                with koji_service(
                        conf.koji_profile, log, login=False) as koji_session:
                    task = koji_session.get_task_info(build.build_id)
                    # Map the numeric Koji task state back to its name.
                    task_states = {v: k for k, v in koji.TASK_STATES.items()}
                    new_state = task_states[task["state"]]
                    if new_state not in ["FAILED", "CLOSED"]:
                        continue
                    event = BrewContainerTaskStateChangeEvent(
                        "fake event", build.name, None, None, build.build_id,
                        "BUILD", new_state)
                    work_queue_put(event)
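The one non-obvious step above is the dictionary inversion that maps Koji's numeric task state back to its name. The stand-alone sketch below mirrors it with a hand-written mapping in place of koji.TASK_STATES; the values shown follow the koji library's convention but should be treated as illustrative.

# Stand-in for koji.TASK_STATES (name -> numeric id).
TASK_STATES = {"FREE": 0, "OPEN": 1, "CLOSED": 2,
               "CANCELED": 3, "ASSIGNED": 4, "FAILED": 5}

# Invert it so the numeric state from get_task_info() maps back to its name.
id_to_name = {v: k for k, v in TASK_STATES.items()}

task = {"state": 5}                      # shaped like a get_task_info() result
new_state = id_to_name[task["state"]]
if new_state in ("FAILED", "CLOSED"):
    print("task finished:", new_state)   # only finished tasks trigger the fake event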