Example 1
def test_export(db_factory, example_data):
    """
    Test suitcase-mongo-embedded serializer with default parameters.
    """
    permanent_db = db_factory()
    serializer = Serializer(permanent_db)
    run(example_data, serializer, permanent_db)
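    # Close the serializer manually only if run() did not already freeze it.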
    if not serializer._frozen:
        serializer.close()
Example 2
def test_smallpage(db_factory, example_data):
    """
    Test suitcase-mongo-embedded serializer with a small mongo page size.
    """
    permanent_db = db_factory()
    serializer = Serializer(permanent_db, page_size=10000)
    run(example_data, serializer, permanent_db)
    if not serializer._frozen:
        serializer.close()
Example 3
def test_smallqueue(db_factory, example_data):
    """
    Test suitcase-mongo-embedded serializer with a small buffer.
    """
    permanent_db = db_factory()
    serializer = Serializer(permanent_db, queue_size=1)
    run(example_data, serializer, permanent_db)
    if not serializer._frozen:
        serializer.close()
Example 4
def test_multithread(db_factory, example_data):
    """
    Test suitcase-mongo-embedded serializer with multiple worker threads.
    """
    permanent_db = db_factory()
    serializer = Serializer(permanent_db, num_threads=5)
    run(example_data, serializer, permanent_db)
    if not serializer._frozen:
        serializer.close()
Example 5
def test_evil_db(db_factory, example_data):
    """
    Test suitcase-mongo-embedded serializer with a db that raises an exception
    on bulk_write.
    """
    def evil_func(*args, **kwargs):
        raise RuntimeError

    permanent_db = db_factory()
    serializer = Serializer(permanent_db)
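    # Replace the internal bulk-write methods so every flush raises RuntimeError.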
    serializer._bulkwrite_event = evil_func
    serializer._bulkwrite_datum = evil_func
    with pytest.raises(RuntimeError):
        run(example_data, serializer, permanent_db)
    if not serializer._frozen:
        serializer.close()
Example 6
@pytest.fixture(params=['local', 'remote'])
def bundle(request, intake_server, example_data, db_factory):  # noqa
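    """
    Serialize example data into MongoDB and build an intake catalog for reading it back.
    """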
    fullname = os.path.join(TMP_DIR, YAML_FILENAME)
    permanent_db = db_factory()
    serializer = Serializer(permanent_db)
    uid, docs = example_data
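    # Feed every (name, doc) pair from the example run through the serializer.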
    for name, doc in docs:
        serializer(name, doc)

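    # Reconstruct a MongoDB URI from a pymongo Database handle.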
    def extract_uri(db):
        return f'mongodb://{db.client.address[0]}:{db.client.address[1]}/{db.name}'

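    # Write an intake catalog YAML that points the BlueskyMongoCatalog driver
    # at the freshly populated database.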
    with open(fullname, 'w') as f:
        f.write(f'''
plugins:
  source:
    - module: intake_bluesky
sources:
  xyz:
    description: Some imaginary beamline
    driver: intake_bluesky.mongo_embedded.BlueskyMongoCatalog
    container: catalog
    args:
      datastore_db: {extract_uri(permanent_db)}
      handler_registry:
        NPY_SEQ: ophyd.sim.NumpySeqHandler
    metadata:
      beamline: "00-ID"
        ''')

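    # Allow time for the new catalog file to be picked up before opening it.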
    time.sleep(2)

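    # Open the catalog either directly from the YAML file or via the intake server.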
    if request.param == 'local':
        cat = intake.Catalog(os.path.join(TMP_DIR, YAML_FILENAME))
    elif request.param == 'remote':
        cat = intake.Catalog(intake_server, page_size=10)
    else:
        raise ValueError
    return types.SimpleNamespace(cat=cat, uid=uid, docs=docs)
Example 7
# generate_data.py
import logging
from suitcase.mongo_embedded import Serializer
from bluesky import RunEngine
from bluesky.plans import count
from ophyd.sim import det
import pymongo
import uuid

from databroker._drivers.mongo_embedded import BlueskyMongoCatalog

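# Execute two short count plans with bluesky and serialize them into MongoDB.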
RE = RunEngine()

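# Use a uniquely named database so repeated runs of this script do not collide.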
uri = f'mongodb://localhost:27017/databroker-test-{uuid.uuid4()}'
database = pymongo.MongoClient(uri).get_database()
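# Serialize one single-point run and one three-point run, one Serializer per run.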
with Serializer(database) as serializer:
    RE(count([det]), serializer)
    # time.sleep(5)
with Serializer(database) as serializer:
    RE(count([det], 3), serializer)
    # time.sleep(5)

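# Turn on debug logging for databroker to trace catalog access.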
logger = logging.getLogger('databroker')
logger.setLevel('DEBUG')
handler = logging.StreamHandler()
handler.setLevel('DEBUG')
logger.addHandler(handler)

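# Open a catalog backed by the database populated above.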
catalog = BlueskyMongoCatalog(uri)