def test_document_funcs_for_smoke():
    global run_start_uid, descriptor_uid
    # todo this next line will break once NSLS-II/metadatastore#142 is merged
    run_start, = find_run_starts(uid=run_start_uid)
    descriptors = [desc for desc in find_event_descriptors(uid=descriptor_uid)]
    run_stop, = find_run_stops(uid=run_stop_uid)
    documents = [run_start, run_stop]
    documents.extend(descriptors)
    attrs = ['__repr__', '__str__', '_repr_html_']
    for doc, attr in product(documents, attrs):
        getattr(doc, attr)()
def mock_run_start(run_start_uid=None, sleep=0, make_run_stop=True):
    if run_start_uid is None:
        blc_uid = insert_beamline_config({}, time=get_time())
        run_start_uid = insert_run_start(time=get_time(), scan_id=1,
                                         beamline_id='example',
                                         uid=str(uuid.uuid4()),
                                         beamline_config=blc_uid)
    # these events are already the sanitized version, not raw mongo objects
    events = func(run_start_uid, sleep)
    # Infer the end run time from events, since all the times are
    # simulated and not necessarily based on the current time.
    time = max([event['time'] for event in events])
    if make_run_stop:
        run_stop_uid = insert_run_stop(run_start_uid, time=get_time(),
                                       exit_status='success')
        run_stop, = find_run_stops(uid=run_stop_uid)
    return events
def mock_run_start(run_start_uid=None, sleep=0, make_run_stop=True):
    if run_start_uid is None:
        run_start_uid = insert_run_start(time=get_time(), scan_id=1,
                                         beamline_id='example',
                                         uid=str(uuid.uuid4()))
    # these events are already the sanitized version, not raw mongo objects
    events = func(run_start_uid, sleep)
    # Infer the end run time from events, since all the times are
    # simulated and not necessarily based on the current time.
    time = max([event['time'] for event in events])
    if make_run_stop:
        run_stop_uid = insert_run_stop(run_start_uid, time=time,
                                       exit_status='success',
                                       uid=str(uuid.uuid4()))
        run_stop, = find_run_stops(uid=run_stop_uid)
    return events
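# NOTE: `func` is not defined inside mock_run_start above, which suggests it is
# the inner wrapper produced by a decorator around sample-data generator
# functions. The sketch below is an assumption about that pattern, not the
# project's actual decorator: the names `example` and `temperature_ramp` are
# hypothetical, and the metadatastore insert/find calls are replaced with
# simple stand-ins so the sketch runs on its own.
import functools
import time as ttime
import uuid


def example(func):
    """Hypothetical decorator turning a generator of simulated events into a
    self-contained mock run (a sketch, assuming the structure above)."""
    @functools.wraps(func)
    def mock_run_start(run_start_uid=None, sleep=0, make_run_stop=True):
        if run_start_uid is None:
            # stand-in for insert_run_start(...)
            run_start_uid = str(uuid.uuid4())
        # `func` is the wrapped sample-data generator, closed over here
        events = func(run_start_uid, sleep)
        if make_run_stop:
            # stand-in for insert_run_stop(...); end time inferred from events
            stop_time = max(event['time'] for event in events)
            _run_stop = {'run_start': run_start_uid, 'time': stop_time,
                         'exit_status': 'success'}
        return events
    return mock_run_start


@example
def temperature_ramp(run_start_uid, sleep=0):
    # Hypothetical generator returning simulated, already-sanitized event dicts.
    return [{'time': ttime.time() + i, 'data': {'T': 300.0 + i}, 'seq_num': i}
            for i in range(5)]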
def from_run_start(cls, run_start, verify_integrity=True):
    """
    Build a Header from a RunStart Document.

    Parameters
    ----------
    run_start : metadatastore.document.Document

    Returns
    -------
    header : dataportal.broker.Header
    """
    header = Header()
    header._name = "Header"
    header.event_descriptors = list(
        find_event_descriptors(run_start=run_start))
    run_stops = list(find_run_stops(run_start=run_start.uid))
    try:
        run_stop, = run_stops
    except ValueError:
        num = len(run_stops)
        run_stop = None
        if num == 0:
            error_msg = ("A RunStop record could not be found for the "
                         "run with run_start_uid {0}".format(run_start.uid))
            warnings.warn(error_msg)
        else:
            error_msg = (
                "{0} RunStop records (uids {1}) were found for the run "
                "with run_start_uid {2}".format(
                    num, [rs.uid for rs in run_stops], run_start.uid))
            if verify_integrity:
                raise IntegrityError(error_msg)
            else:
                warnings.warn(error_msg)

    # Map keys from RunStart and RunStop onto Header.
    run_start_renames = {'start_time': 'time',
                         'start_datetime': 'time_as_datetime',
                         'scan_id': 'scan_id',
                         'beamline_id': 'beamline_id',
                         'owner': 'owner',
                         'group': 'group',
                         'project': 'project',
                         'run_start_id': 'id',
                         'run_start_uid': 'uid'}
    run_start_renames_back = {v: k for k, v in run_start_renames.items()}
    for k in run_start:
        new_key = run_start_renames_back.get(k, k)
        header[new_key] = run_start[k]
    if run_stop is not None:
        run_stop_renames = {'stop_time': 'time',
                            'stop_datetime': 'time_as_datetime',
                            'exit_reason': 'reason',
                            'exit_status': 'exit_status',
                            'run_stop_id': 'id',
                            'run_stop_uid': 'uid'}
        run_stop_renames_back = {v: k for k, v in run_stop_renames.items()}
        for k in run_stop:
            new_key = run_stop_renames_back.get(k, k)
            header[new_key] = run_stop[k]
        del header['run_start']
    run_start._name = 'Header'
    # Strip unuseful recursion. We don't mess with underlying Documents,
    # but the header is a special case, and the repr should reflect
    # its structure accurately.
    for ev_desc in header.event_descriptors:
        ev_desc.run_start = ev_desc.run_start.uid
    return header
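# Illustrative usage of from_run_start (a sketch under assumptions, not the
# library's documented example): `find_run_starts` is taken to come from
# metadatastore's command layer, and `some_uid` is a placeholder for a real
# RunStart uid.
def _example_build_header(some_uid):
    run_start, = find_run_starts(uid=some_uid)
    # verify_integrity=False downgrades a duplicate-RunStop condition from an
    # IntegrityError to a warning, per the logic above.
    return Header.from_run_start(run_start, verify_integrity=False)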