Example #1
from flask import jsonify, render_template, request  # assumed Flask app context
from metadatastore.mds import MDSRO


def search():
    """Full-text search of run start documents; returns up to 10 rendered run headers as JSON."""
    db = MDSRO({'host': 'localhost',
                'port': 27017,
                'database': 'test',
                'timezone': 'US/Eastern'})
    q = request.args.get('q', '')
    if not q:
        return jsonify(result={'runs': [], 'count': 0})
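    # MongoDB full-text query; requires a text index on the run start collection.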
    query = {'$text': {'$search': q}}
    docs = list(db.find_run_starts(**query))
    count = len(docs)
    if len(docs) > 10:
        docs = docs[:10]
    runs = [render_template('header.html', run=run, abbrev_uid=run['uid'][:8])
            for run in docs]
    return jsonify(result={'runs': runs, 'count': count})
Example #2
def search():
    """Full-text search of run start documents; returns up to 10 rendered run headers as JSON."""
    db = MDSRO({
        'host': 'localhost',
        'port': 27017,
        'database': 'test',
        'timezone': 'US/Eastern'
    })
    q = request.args.get('q', '')
    if not q:
        return jsonify(result={'runs': [], 'count': 0})
    query = {'$text': {'$search': q}}
    docs = list(db.find_run_starts(**query))
    count = len(docs)
    if len(docs) > 10:
        docs = docs[:10]
    runs = [
        render_template('header.html', run=run, abbrev_uid=run['uid'][:8])
        for run in docs
    ]
    return jsonify(result={'runs': runs, 'count': count})
Example #3
from metadatastore.mds import MDSRO
from portable_mds.mongoquery.mds import MDS


source_config = {'host': 'localhost',
                 'port': 27017,
                 'database': 'metadatastore_production_v1',
                 'timezone': 'US/Eastern'}
dest_config = {'directory': 'some_directory',
               'timezone': 'US/Eastern'}

source = MDSRO(source_config)  # a read-only metadatastore object
dest = MDS(dest_config)

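# Copy each run document-by-document: the start doc first, then every
# descriptor with its events, and finally the stop doc.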
for run_start in source.find_run_starts():
    dest.insert_run_start(**run_start)
    for desc in source.find_descriptors(run_start=run_start):
        events = source.get_events_generator(descriptor=desc)
        dest.insert_descriptor(**desc)
        dest.bulk_insert_events(desc, events)
    dest.insert_run_stop(**source.stop_by_start(run_start))
Example #4
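# Fragment: the tail of the compare(o, n) helper used below, presumably inside
# an except block (the bare raise re-raises the caught exception). Mismatches
# are tolerated only for these known beamline_id spellings.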
        if o['run_start']['beamline_id'] in ['CSX', 'xf23id', 'CSX-1']:
            pass
        else:
            print(o)
            print(n)
            raise

from collections import deque

from metadatastore.mds import MDS, MDSRO  # matches the usage in Example #6
from tqdm import tqdm

# OLD_DATABASE and NEW_DATABASE are defined elsewhere in the original script.
old_config = dict(database=OLD_DATABASE,
                  host='localhost',
                  port=27017,
                  timezone='US/Eastern')
new_config = old_config.copy()

new_config['database'] = NEW_DATABASE

old = MDSRO(version=0, config=old_config)
new = MDS(version=1, config=new_config)

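# Note: Cursor.count() was removed in pymongo 4; Collection.count_documents({})
# is the modern equivalent for these totals.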
total = old._runstart_col.find().count()
old_starts = tqdm(old.find_run_starts(), unit='start docs', total=total,
                  leave=True)
new_starts = new.find_run_starts()
for o, n in zip(old_starts, new_starts):
    compare(o, n)

total = old._runstop_col.find().count()
old_stops = tqdm(old.find_run_stops(), unit='stop docs', total=total)
new_stops = new.find_run_stops()
for o, n in zip(old_stops, new_stops):
    compare(o, n)
descs = deque()
Example #5
from metadatastore.mds import MDSRO
from portable_mds.mongoquery.mds import MDS

source_config = {
    'host': 'localhost',
    'port': 27017,
    'database': 'metadatastore_production_v1',
    'timezone': 'US/Eastern'
}
dest_config = {'directory': 'some_directory', 'timezone': 'US/Eastern'}

source = MDSRO(source_config)  # a read-only metadatastore object
dest = MDS(dest_config)

for run_start in source.find_run_starts():
    dest.insert_run_start(**run_start)
    for desc in source.find_descriptors(run_start=run_start):
        events = source.get_events_generator(descriptor=desc)
        dest.insert_descriptor(**desc)
        dest.bulk_insert_events(desc, events)
    dest.insert_run_stop(**source.stop_by_start(run_start))
Example #6
import ipyparallel as ipp
from tqdm import tqdm


def main(target, source):
    print('Database to be migrated: {}'.format(source))
    print('Database to migrate into: {}'.format(target))
    rc = ipp.Client()

    dview = rc[:]

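    # Run these imports both locally and on every engine in the direct view.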
    with dview.sync_imports():
        from metadatastore.mds import MDS, MDSRO
        from collections import deque

    old_config = {
        'database': source,
        'host': 'localhost',
        'port': 27017,
        'timezone': 'US/Eastern'}
    new_config = {
        'database': target,
        'host': 'localhost',
        'port': 27017,
        'timezone': 'US/Eastern'}

    old_t = MDSRO(version=0, config=old_config)
    new_t = MDS(version=1, config=new_config)

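    # Touch every collection so each engine's copies of 'old' and 'new' open
    # their own MongoDB connections.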
    def condition_config():
        import time

        global new, old
        for md in [new, old]:
            md._runstart_col.find_one()
            md._runstop_col.find_one()
            md._event_col.find_one()
            md._descriptor_col.find_one()
        time.sleep(1)

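    # Report whether the pushed objects arrived on the engines without a live
    # pymongo connection attached (both values should be True).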
    def invasive_checks():
        global old, new
        return (old._MDSRO__conn is None,
                new._MDSRO__conn is None)

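    # Ship the MDS objects to the engines, warm up their connections, and run
    # the sanity check above.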
    dview.push({'old': old_t, 'new': new_t})
    dview.apply(condition_config)
    print(list(dview.apply(invasive_checks)))
    new_t._connection.drop_database(target)

    # Drop all indexes on event collection to speed up insert.
    # They will be rebuilt the next time an MDS(RO) object connects.
    new_t._event_col.drop_indexes()
    new = new_t
    old = old_t
    # old._runstart_col.drop_indexes()
    total = old._runstart_col.find().count()
    for start in tqdm(old.find_run_starts(), desc='start docs', total=total):
        new.insert('start', start)

    total = old._runstop_col.find().count()
    for stop in tqdm(old.find_run_stops(), desc='stop docs', total=total):
        try:
            new.insert('stop', stop)
        except RuntimeError:
            print("error inserting run stop with uid {!r}".format(stop['uid']))

    descs = deque()
    counts = deque()
    old._descriptor_col.drop_indexes()
    total = old._descriptor_col.find().count()
    for desc in tqdm(old.find_descriptors(), unit='descriptors', total=total):
        d_raw = next(old._descriptor_col.find({'uid': desc['uid']}))
        num_events = old._event_col.find(
            {'descriptor_id': d_raw['_id']}).count()
        new.insert('descriptor', desc)
        descs.append(dict(desc))
        counts.append(num_events)

    new.clear_process_cache()
    old.clear_process_cache()

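    # Copy one descriptor's event stream in chunks of 5000 events, retrying
    # the whole stream if MongoDB drops the connection mid-insert.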
    def migrate_event_stream(desc_in, num_events):
        import pymongo.errors
        import time

        global new, old
        if num_events:
            flag = True
            # empty streams were skipped above; retry if the bulk insert
            # raises AutoReconnect
            while flag:
                flag = False
                try:
                    events = old.get_events_generator(descriptor=desc_in,
                                                      convert_arrays=False)
                    events = iter(events)
                    l_cache = deque()
                    while True:
                        try:
                            for j in range(5000):
                                l_cache.append(next(events))
                        except StopIteration:
                            break
                        finally:
                            if l_cache:
                                new.bulk_insert_events(descriptor=desc_in,
                                                       events=l_cache)
                            l_cache.clear()
                except KeyError:
                    print("here here, key error")
                except pymongo.errors.AutoReconnect:
                    flag = True
                    time.sleep(10)

        new.clear_process_cache()
        old.clear_process_cache()
        return num_events

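    # Fan the per-descriptor migrations out across the engines; the progress
    # bar advances by the event count each finished stream returns.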
    v = rc.load_balanced_view()
    amr = v.map(migrate_event_stream, descs, list(counts), ordered=False)
    total = sum(counts)
    with tqdm(total=total, unit='events') as pbar:
        for res in amr:
            pbar.update(res)
Example #7
        if o['run_start']['beamline_id'] in ['CSX', 'xf23id', 'CSX-1']:
            pass
        else:
            print(o)
            print(n)
            raise


old_config = dict(database=OLD_DATABASE,
                  host='localhost',
                  port=27017,
                  timezone='US/Eastern')
new_config = old_config.copy()

new_config['database'] = NEW_DATABASE

old = MDSRO(version=0, config=old_config)
new = MDS(version=1, config=new_config)

total = old._runstart_col.find().count()
old_starts = tqdm(old.find_run_starts(),
                  unit='start docs',
                  total=total,
                  leave=True)
new_starts = new.find_run_starts()
for o, n in zip(old_starts, new_starts):
    compare(o, n)

total = old._runstop_col.find().count()
old_stops = tqdm(old.find_run_stops(), unit='stop docs', total=total)
new_stops = new.find_run_stops()
for o, n in zip(old_stops, new_stops):