def bind_queue(connection, queue):
    """Bind a queue to every binding listed in its config entry.

    :param connection: an open AMQP connection (wrapped via proxy_connection)
    :param queue: queue config dict; reads 'queue' (the queue name) and
        'bindings', a list of dicts each carrying 'exchange' and
        'routing_key' keys.
    """
    logger = logging.getLogger('amqp-dispatcher')
    logger.debug("Binding queue {0}".format(queue))
    bindings = queue.get('bindings')
    connection = proxy_connection(connection)
    # synchronous channel so each bind completes before we continue
    ch = connection.channel(synchronous=True)
    ch = proxy_channel(ch)
    # inspect.getargspec was removed in Python 3.11; prefer getfullargspec
    # when available while staying compatible with older interpreters.
    getspec = getattr(inspect, 'getfullargspec', inspect.getargspec)
    arg_spec = getspec(ch.queue_bind)
    name = queue.get('queue')
    for binding in bindings:
        exchange = binding['exchange']
        key = binding['routing_key']
        logger.info("bind {0} to {1}:{2}".format(name, exchange, key))
        # Some channel implementations accept a nowait flag; pass it
        # explicitly as False so the bind stays synchronous when supported.
        if 'nowait' in arg_spec.args:
            ch.queue_bind(name, exchange, key, nowait=False)
        else:
            ch.queue_bind(name, exchange, key)
def create_queue(connection, queue):
    """Declare a queue synchronously from its config entry.

    :param connection: an open AMQP connection (wrapped via proxy_connection)
    :param queue: queue config dict; reads 'queue' (name) plus optional
        'durable' (default True), 'auto_delete' (default False),
        'exclusive' (default False), and the x_* broker arguments listed
        below.
    """
    logger = logging.getLogger('amqp-dispatcher')
    name = queue['queue']
    logger.info("Create queue {0}".format(name))
    durable = bool(queue.get('durable', True))
    auto_delete = bool(queue.get('auto_delete', False))
    exclusive = bool(queue.get('exclusive', False))

    passive = False
    nowait = False

    # Config keys use underscores; the broker expects dashes
    # (e.g. x_message_ttl -> x-message-ttl).
    arguments = {}
    queue_args = [
        'x_dead_letter_exchange',
        'x_dead_letter_routing_key',
        'x_max_length',
        'x_expires',
        'x_message_ttl',
    ]

    for queue_arg in queue_args:
        key = queue_arg.replace('_', '-')
        if queue.get(queue_arg):
            arguments[key] = queue.get(queue_arg)

    connection = proxy_connection(connection)
    # synchronous channel so queue_declare returns the declare-ok result
    ch = connection.channel(synchronous=True)
    ch = proxy_channel(ch)
    ret = ch.queue_declare(
        queue=name,
        passive=passive,
        exclusive=exclusive,
        durable=durable,
        auto_delete=auto_delete,
        nowait=nowait,
        arguments=arguments
    )
    name, message_count, consumer_count = ret
    # BUG FIX: the original template used {1} twice, so the consumer count
    # was never printed even though it was passed as the third argument.
    log_message = "Queue {0} - {1} messages and {2} consumers connected"
    logger.info(log_message.format(name, message_count, consumer_count))
def setup(logger_name, connector, connect_to_hosts):
    """Wire up the dispatcher: connect, declare queues, start consumers.

    Loads the YAML config named on the CLI, runs an optional startup
    handler, connects to the broker hosts from the environment, creates
    and binds configured queues, starts one ConsumerPool per configured
    consumer, and returns an (unstarted) greenlet running the message
    pump.

    :param logger_name: name of the logger handed to the rabbit connection
    :param connector: connection class/factory passed to connect_to_hosts
    :param connect_to_hosts: callable that tries each host and returns a
        connection or None
    :returns: a gevent.Greenlet for the message pump, or None when no
        connection could be established
    """
    logger = logging.getLogger('amqp-dispatcher')

    args = get_args_from_cli()
    # Use a context manager so the config file handle is closed promptly
    # (the original leaked it until GC). safe_load accepts a stream.
    with open(args.config) as config_file:
        config = yaml.safe_load(config_file)

    # Optional hook: dotted-path callable run once before connecting.
    startup_handler_str = config.get('startup_handler')
    if startup_handler_str is not None:
        startup_handler = load_module_object(startup_handler_str)
        startup_handler()
        logger.info('Startup handled')

    # Random suffix makes the connection name unique per process instance,
    # which helps identify this client in the broker's management UI.
    random_generator = random.SystemRandom()
    random_string = ''.join([
        random_generator.choice(string.ascii_lowercase) for i in range(10)
    ])
    connection_name = '{0}-{1}-{2}'.format(
        socket.gethostname(),
        os.getpid(),
        random_string,
    )

    hosts, user, password, vhost, port, heartbeat = parse_env()
    rabbit_logger = logging.getLogger(logger_name)
    rabbit_logger.setLevel(logging.INFO)
    conn = connect_to_hosts(
        connector,
        hosts,
        port=port,
        transport='gevent',
        user=user,
        password=password,
        vhost=vhost,
        logger=rabbit_logger,
        heartbeat=heartbeat,
        client_properties={
            'connection_name': connection_name,
        },
    )
    if conn is None:
        logger.warning("No connection -- returning")
        return

    # Declare/bind queues before any consumer attaches to them.
    queues = config.get('queues')
    if queues:
        create_and_bind_queues(conn, queues)

    conn = proxy_connection(conn)
    conn.add_on_close_callback(create_connection_closed_cb(conn))

    # Create message channel
    channel = conn.channel()
    channel = proxy_channel(channel)
    channel.add_close_listener(channel_closed_cb)

    # One dedicated channel + greenlet pool per configured consumer.
    for consumer in config.get('consumers', []):
        queue_name = consumer['queue']
        prefetch_count = consumer.get('prefetch_count', 1)
        consumer_str = consumer.get('consumer')
        consumer_count = consumer.get('consumer_count', 1)

        consumer_klass = load_consumer(consumer_str)
        consume_channel = conn.channel()
        consume_channel = proxy_channel(consume_channel)
        consume_channel.basic_qos(prefetch_count=prefetch_count)
        pool = ConsumerPool(
            consume_channel,
            consumer_klass,
            gevent.Greenlet,
            consumer_count
        )

        # no_ack=False: messages must be explicitly acked by the consumer
        consume_channel.basic_consume(
            consumer_callback=pool.handle,
            queue=queue_name,
            no_ack=False,
        )
        # Yield to the hub so the consume registration can make progress.
        gevent.sleep()

    # Caller is responsible for starting the returned greenlet.
    message_pump_greenlet = gevent.Greenlet(
        message_pump_greenthread, conn)

    return message_pump_greenlet