Example #1
# Imports reconstructed from the names used below; the publisher aliases are
# assumptions based on common NSLS-II startup scripts.
import argparse
import logging
import pprint
from functools import partial

import IPython
import msgpack
import msgpack_numpy as mpn

import databroker
import happi
import happi.loader

from bluesky import RunEngine
from bluesky.callbacks.best_effort import BestEffortCallback
from bluesky.callbacks.zmq import Publisher as zmqPublisher, RemoteDispatcher
from bluesky_kafka import Publisher as kafkaPublisher
from bluesky_queueserver.plan import configure_plan
from event_model import RunRouter

ip = IPython.get_ipython()

hclient = happi.Client(path='/usr/local/share/happi/test_db.json')
db = databroker.catalog['MAD']

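# RunEngine executes Bluesky plans; BestEffortCallback provides live table/plot feedback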
RE = RunEngine()
bec = BestEffortCallback()

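# create publishers for Bluesky documents: one over 0MQ, one to a Kafka topic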
zmq_publisher = zmqPublisher("127.0.0.1:4567")
kafka_publisher = kafkaPublisher(
    topic="mad.bluesky.documents",
    bootstrap_servers="127.0.0.1:9092",
    key="kafka-unit-test-key",
    # work with a single broker
    producer_config={
        "acks": 1,
        "enable.idempotence": False,
        "request.timeout.ms": 5000,
    },
    serializer=partial(msgpack.dumps, default=mpn.encode),
)

logger = logging.getLogger("databroker")
logger.setLevel("DEBUG")
# publish 0MQ messages at XPD from xf28id2-ca1:5577
# subscribe to 0MQ messages at XPD from xf28id2-ca1:5578
arg_parser.add_argument("--zmq-host", type=str, default="xf28id2-ca1")
arg_parser.add_argument("--zmq-publish-port", type=int, default=5577)
arg_parser.add_argument("--zmq-publish-prefix", type=str, default="rr")
arg_parser.add_argument("--zmq-subscribe-port", type=int, default=5578)
arg_parser.add_argument("--zmq-subscribe-prefix", type=str, default="an")

args = arg_parser.parse_args()

pprint.pprint(vars(args))

# this process listens for 0MQ messages with prefix "an" (from xpdan)
d = RemoteDispatcher(
    f"{args.zmq_host}:{args.zmq_subscribe_port}",
    prefix=args.zmq_subscribe_prefix.encode(),
    #deserializer=womp_womp,
)

zmq_publisher = zmqPublisher(f"{args.zmq_host}:{args.zmq_publish_port}",
                             prefix=args.zmq_publish_prefix.encode())
peak_location = (2.63, 2.7)
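# xpdan_result_picker_factory is not shown here; it is assumed to be a RunRouter
# factory defined elsewhere in the original script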
rr = RunRouter([xpdan_result_picker_factory(zmq_publisher, peak_location)])
d.subscribe(rr)

print(
    f"ROI REDUCTION CONSUMER IS LISTENING ON {args.zmq_subscribe_prefix.encode()}"
)
print(f"AND PUBLISHING ON {args.zmq_publish_prefix.encode()}")
d.start()
Example #3
logger = logging.getLogger("databroker")
logger.setLevel("DEBUG")
handler = logging.StreamHandler()
handler.setLevel("DEBUG")
logger.addHandler(handler)

ip = IPython.get_ipython()

hclient = happi.Client(path="/usr/local/share/happi/test_db.json")
db = databroker.catalog["MAD"]

RE = RunEngine()
bec = BestEffortCallback()

# xpdan listens for 0MQ messages with prefix "raw"
zmq_publisher = zmqPublisher(address="127.0.0.1:4567", prefix=b"raw")


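# send documents emitted by the RunEngine to xpdan (over 0MQ) and to the live-feedback callback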
RE.subscribe(zmq_publisher)
RE.subscribe(bec)


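# minimal queue wrapper around a Redis list: put() pushes JSON-encoded values onto the "adaptive" key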
class RedisQueue:
    def __init__(self, client):
        self.client = client

    def put(self, value):
        self.client.lpush("adaptive", json.dumps(value))

    def get(self, timeout=0, block=True):
        if block: