Example #1
def test_data_iterator_event(beacon, redis_data_conn, scan_tmpdir):
    def iterate_channel_events(scan_db_name, channels):
        # Collect the full data of every 'channel' node as events arrive.
        for e, n in DataNodeIterator(get_node(scan_db_name)).walk_events():
            if n.type == 'channel':
                channels[n.name] = n.get(0, -1)

    scan_saving = getattr(setup_globals, "SCAN_SAVING")
    scan_saving.base_path = str(scan_tmpdir)
    parent = scan_saving.get_parent_node()
    m = getattr(setup_globals, "roby")
    m.velocity(10)
    diode = getattr(setup_globals, "diode")
    npts = 5
    chain = AcquisitionChain()
    chain.add(
        SoftwarePositionTriggerMaster(m, 0, 1, npts),
        SamplingCounterAcquisitionDevice(diode, 0.01, npoints=npts),
    )

    s = Scan(chain, "test_scan", parent)

    channels_data = dict()
    iteration_greenlet = gevent.spawn(
        iterate_channel_events, s.node.db_name, channels_data
    )

    s.run()

    # let the iteration greenlet process the last events before killing it
    gevent.sleep(0.1)
    iteration_greenlet.kill()

    assert set(('roby', 'diode')) == set(channels_data.keys())
    assert len(channels_data['roby']) == npts
    assert len(channels_data['diode']) == npts

    for n in DataNodeIterator(get_node(s.node.db_name)).walk_from_last(
            filter='channel', wait=False):
        assert n.get(0, -1) == channels_data[n.name]
    assert isinstance(n, ChannelDataNode)
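A variation on the pattern above, as a sketch: instead of spawning and later killing the iteration greenlet, the event walk can be bounded with gevent.Timeout (the same mechanism used in Example #5 below). Only calls already shown in these examples are used; the helper name and the 10-second budget are assumptions for illustration.

def collect_channel_data(scan_db_name, timeout=10):
    # Walk data events and collect every 'channel' node's data until the
    # time budget is spent (walk_events never returns on its own).
    channels = {}
    try:
        with gevent.Timeout(timeout):
            for e, n in DataNodeIterator(get_node(scan_db_name)).walk_events():
                if n.type == 'channel':
                    channels[n.name] = n.get(0, -1)
    except gevent.Timeout:
        pass
    return channels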
Example #2
def test_data_iterator(beacon, redis_data_conn):
    session = beacon.get("test_session")
    redis_keys = set(redis_scan(session.name+"*", connection=redis_data_conn))
    session_node = get_node(session.name)
    db_names = {n.db_name for n in DataNodeIterator(session_node).walk(wait=False)}
    assert len(db_names) > 0
    assert db_names == redis_keys.intersection(db_names)
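The last assertion checks that every walked db_name also exists among the scanned Redis keys; written with set operators, the same condition reads as a subset test (an equivalent sketch, not part of the original example):

assert db_names <= redis_keys  # db_names is a subset of the Redis keys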
Example #3
def test_reference_with_lima(beacon, redis_data_conn, scan_tmpdir, lima_simulator):
    session = beacon.get("lima_test_session")
    session.setup()
    setup_globals.SCAN_SAVING.base_path = str(scan_tmpdir)
    lima_sim = getattr(setup_globals, "lima_simulator")
    timescan = scans.timescan(0.1, lima_sim, npoints=3, return_scan=True)

    redis_keys = set(redis_scan(session.name+"*", connection=redis_data_conn))
    session_node = get_node(session.name)
    db_names = {n.db_name for n in DataNodeIterator(session_node).walk(wait=False)}

    image_node_db_name = '%s:timer:Simulator:image' % timescan.node.db_name
    assert image_node_db_name in db_names

    live_ref_status = QueueObjSetting(
        "%s_data" % image_node_db_name, connection=redis_data_conn
    )[0]
    assert live_ref_status['last_image_saved'] == 2  # npoints - 1
Example #4
def _get_session_scans(session):
    if hasattr(session, "name"):
        session_node = session
        session_name = session.name
    else:
        session_node = get_node(session)
        session_name = session
    db_names = rdsscan(
        f"{session_name}:*_children_list",
        count=1000000,
        connection=session_node.db_connection,
    )
    # We are interested in actual scans, therefore we skip scans
    # whose name starts with an underscore.
    return (
        node
        for node in get_nodes(
            *(db_name.replace("_children_list", "") for db_name in db_names)
        )
        if node is not None
        and node.type == "scan"
        and hasattr(node.name, "startswith")
        and not node.name.startswith("_")
    )
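A hypothetical way to call the helper above, showing the two accepted input forms (a resolved session node or a plain session name); the session name "test_session" is an assumption used only for illustration:

# With a plain session name: the helper resolves the node itself.
for scan_node in _get_session_scans("test_session"):
    print(scan_node.name, scan_node.db_name)

# With an already resolved node: the helper uses it directly.
session_node = get_node("test_session")
for scan_node in _get_session_scans(session_node):
    print(scan_node.name)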
Example #5
def test_iterator_over_reference_with_lima(beacon, redis_data_conn, scan_tmpdir, lima_simulator):
    npoints = 5
    exp_time = 1

    session = beacon.get("lima_test_session")
    session.setup()
    setup_globals.SCAN_SAVING.base_path = str(scan_tmpdir)
    lima_sim = getattr(setup_globals, "lima_simulator")

    scan_greenlet = gevent.spawn(scans.timescan, exp_time, lima_sim, npoints=npoints)

    gevent.sleep(exp_time)  # give the spawned scan time to start and publish its nodes

    session_node = get_node(session.name)
    iterator = DataNodeIterator(session_node)

    with gevent.Timeout((npoints+1)*exp_time):
        for event_type, node in iterator.walk_events(filter='lima'):
            if event_type == DataNodeIterator.NEW_DATA_IN_CHANNEL_EVENT:
                view = node.get(from_index=0, to_index=-1)
                if len(view) == npoints:
                    break

    view_iterator = iter(view)
    img0 = next(view_iterator)

    # Make another scan -> this should allocate a new buffer on the Lima server,
    # so images from the previous view cannot be retrieved from the server anymore.
    scans.timescan(exp_time, lima_sim, npoints=1)

    view_iterator2 = iter(view)

    # retrieve from file
    if EdfFile is None:
        # No EdfFile module => we can only check that the data is no longer
        # available from server memory (reading it back would require the file).
        with pytest.raises(RuntimeError):
            next(view_iterator2)
    else:
        assert next(view_iterator2) == img0
Example #6
def get_node_list(node,
                  node_type=None,
                  name=None,
                  db_name=None,
                  dimension=None,
                  filter=None,
                  unique=False,
                  reverse=False,
                  ignore_underscore=True):
    """
    Return list of nodes matching the given filter
    """
    if not hasattr(node, "name"):
        input_node = get_node(node)
    else:
        input_node = node
    if hasattr(input_node, "walk"):
        if reverse:
            iterator = input_node.walk_from_last
        else:
            iterator = input_node.walk
    else:
        if reverse:
            iterator = input_node.iterator.walk_from_last
        else:
            iterator = input_node.iterator.walk
    if node_type:
        if hasattr(node_type, "lower"):
            node_type = node_type.lower()
            if node_type not in _NODE_TYPES:
                _logger.warning("Node type %s ignored" % node_type)
                node_type = None

    output_list = []
    # walk without waiting for new nodes
    if node_type or name or db_name or dimension:
        for node in iterator(wait=False, include_filter=filter):
            if ignore_underscore and hasattr(
                    node.name, "startswith") and node.name.startswith("_"):
                continue
            if not _check_dimension(node, dimension):
                continue
            # every criterion that was given must match, otherwise skip the node
            if node_type and node.type != node_type:
                continue
            if name and node.name != name:
                continue
            if db_name and node.db_name != db_name:
                continue
            output_list.append(node)
            if unique and len(output_list):
                break
    else:
        for node in iterator(wait=False, include_filter=filter):
            #print(node.name, node.db_name, node)
            if ignore_underscore and hasattr(
                    node.name, "startswith") and node.name.startswith("_"):
                continue
            output_list.append(node)
            if unique:
                break
    return output_list
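A minimal usage sketch for get_node_list(); the session name and the chosen keyword values are assumptions meant only to illustrate the parameters:

# List scan nodes under a session, most recent first, skipping nodes whose
# name starts with an underscore ("test_session" is a placeholder name).
scan_nodes = get_node_list(
    "test_session",
    node_type="scan",        # assumed to be one of the accepted _NODE_TYPES
    reverse=True,            # use walk_from_last -> newest nodes first
    ignore_underscore=True,
)
for node in scan_nodes:
    print(node.db_name)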
Example #7
scan_number = None
reference = None
_logger.setLevel(logging.DEBUG)
if len(sys.argv) > 1:
    sessions = [sys.argv[1]]
    if len(sys.argv) > 2:
        scan_number = sys.argv[2]
else:
    sessions = get_sessions_list()
for session_name in sessions:
    print("SESSION <%s>" % session_name)
    #connection = client.get_redis_connection(db=1)
    #while not DataNode.exists(session_name, None, connection):
    #    gevent.sleep(1)
    # get node
    session_node = get_node(session_name)
    if not session_node:
        print("\tNot Available")
        continue
    scans = get_session_scan_list(session_node)
    for scan in scans:
        filenames = get_filenames(scan)
        nFiles = len(filenames)
        if not nFiles:
            filename = "No FILE"
        else:
            if nFiles > 1:
                print("WARNING, more than one file associated with this scan")
            filename = filenames[-1]
        sInfo = scan_info(scan)
        print(list(sInfo.keys()))
Example #8
def test_dm_client():
    # the previous test must have been run first
    toto = get_node("toto")
    _walk_children(toto)
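_walk_children is defined elsewhere in the test module; a minimal sketch of what such a helper could look like, using only the DataNodeIterator.walk API shown in the other examples (this is an assumption, not the original helper):

def _walk_children(node):
    # Print every node reachable from `node`, without waiting for
    # nodes that do not exist yet (wait=False).
    for child in DataNodeIterator(node).walk(wait=False):
        print(child.db_name, child.type)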
Example #9
def iterate_channel_events(scan_db_name, channels):
    for e, n in DataNodeIterator(get_node(scan_db_name)).walk_events():
        if n.type == 'channel':
            channels[n.name] = n.get(0, -1)