def serve_forever(self):
    self._running = True
    try:
        while self._running:
            green.sleep(1)
    except (KeyboardInterrupt, SystemExit):
        self.shutdown()
def test_precision_by_config_u(config, clean_db, one_node):
    t_start = datetime.now()
    iv = IsacValue(one_node, 'test://test_precision/test_precision_by_config_u/test', survey_last_value=False, survey_static_tags=False)

    try:
        config['config']['default_ts_precision'] = 'u'
        archiver = arch.InfluxDBArchiver(config)

        expected_history = []
        def _save_point():
            expected_history.append(iv.value_ts_tags)

        base_ts = datetime.now()
        iv.value_ts = randint(0, 100), base_ts
        _save_point()  # When the microsecond timestamps are identical, the first value is kept, not the last one as with other precisions
        iv.value_ts = randint(0, 100), base_ts
        iv.value_ts = randint(0, 100), base_ts + timedelta(microseconds=1)
        _save_point()

        green.sleep(0.5)
        t_end = datetime.now()

        data = iv.get_history((t_start, t_end))
        assert data == expected_history

    finally:
        archiver.shutdown()
        config['config']['default_ts_precision'] = arch.DEFAULT_TS_PRECISION
Example #3
def test_observer_metadata(two_nodes):
    nA, nB = two_nodes
    obs = Observer()

    try:
        nB.transport.join_event()  # Necessary, but not user friendly
        uri = 'test://test_isac_value/test_observer_metadata/test_observer'
        ivA = IsacValue(nA, uri, survey_last_value=False, survey_static_tags=False)
        ivB = IsacValue(nB, uri, survey_last_value=False, survey_static_tags=False)
        ivB.metadata_observers += obs.observer
        ivA.metadata = {'this': 'is', 'meta': 'data'}

        for i in range(10):
            green.sleep(0.5)
            if obs.args is not None:
                break

        assert obs.args, 'Callback not received'
        iv_recv, metadata, source_peer = obs.args
        assert iv_recv == ivB
        assert metadata == ivA.metadata
        assert source_peer['peer_name'] == nA.name
        assert source_peer['peer_uuid'] == str(nA.transport.uuid())

    finally:
        nB.transport.leave_event()
def test_update_value_tags(config, root_client, clean_db, one_node):
    iv = IsacValue(one_node, 'test://test_update/test_update_value_tags/test', static_tags={'static': 'tag'}, survey_last_value=False, survey_static_tags=False)

    try:
        archiver = arch.InfluxDBArchiver(config)

        iv.value = randint(0, 100)
        green.sleep(0.25)
        stored_values = read_data(config, root_client, 'SELECT * FROM /.*/ GROUP BY authority, path ORDER BY time DESC LIMIT 1')
        for uri, points in stored_values.items():
            assert uri == iv.uri
            assert points[0]['value_int'] == iv.value
            compare_time(points[0]['time'], iv.timestamp)
            assert points[0]['d_peer_name'] == iv.isac_node.transport.name()
            assert points[0]['d_peer_uuid'] == str(iv.isac_node.transport.uuid())
            assert points[0]['s_static'] == iv.static_tags['static']

        iv.tags['test'] = str(randint(0, 100))
        iv.value = randint(0, 100)
        green.sleep(0.25)
        stored_values = read_data(config, root_client, 'SELECT * FROM /.*/ GROUP BY authority, path ORDER BY time DESC LIMIT 1')
        for uri, points in stored_values.items():
            assert uri == iv.uri
            assert points[0]['value_int'] == iv.value
            compare_time(points[0]['time'], iv.timestamp)
            assert points[0]['d_test'] == iv.tags['test']
            assert points[0]['d_peer_name'] == iv.isac_node.transport.name()
            assert points[0]['d_peer_uuid'] == str(iv.isac_node.transport.uuid())
            assert points[0]['s_static'] == iv.static_tags['static']


    finally:
        archiver.shutdown()
Example #5
def test_observer_metadata(two_nodes):  # noqa: F811
    nA, nB = two_nodes
    obs = Observer()

    try:
        nB.transport.join_event()  # Necessary, but not user friendly
        uri = 'test://test_isac_value/test_observer_metadata/test_observer'
        ivA = IsacValue(nA,
                        uri,
                        survey_last_value=False,
                        survey_static_tags=False)
        ivB = IsacValue(nB,
                        uri,
                        survey_last_value=False,
                        survey_static_tags=False)
        ivB.metadata_observers += obs.observer
        green.sleep(0.25)
        ivA.metadata = {'this': 'is', 'meta': 'data'}

        for i in range(10):
            green.sleep(0.5)
            if obs.args is not None:
                break

        assert obs.args, 'Callback not received'
        iv_recv, metadata, source_peer = obs.args
        assert iv_recv == ivB
        assert metadata == ivA.metadata
        assert source_peer['peer_name'] == nA.name
        assert source_peer['peer_uuid'] == str(nA.transport.uuid())

    finally:
        nB.transport.leave_event()
Example #6
    def __init__(self, name, context=zmq.Context.instance()):
        self.isac_values = WeakValueDictionary()  # Weak references so unused values can be garbage collected

        self.rpc_regexp = re.compile('^rpc://(.*?)/(.*)$')
        self.rpc = ZmqRPC()
        self.pub_sub = ZmqPubSub(context, self._sub_callback)

        self.transport = PyreNode(name, context)
        try:
            self.surveys_manager = SurveysManager(self, self.transport)
            self.events_manager = EventsManager(self, self.transport)
        except Exception:
            self.transport.stop()
            raise

        self.transport.on_new_peer = self._on_new_peer
        self.transport.on_peer_gone = self._on_peer_gone
        self.transport.on_survey = self.surveys_manager.on_survey
        self.transport.on_event = self.events_manager.on_event

        self.rpc.setup_transport(self.transport)
        self.pub_sub.setup_transport(self.transport)

        self.rpc.start()
        self.pub_sub.start()

        self.transport.run()

        green.sleep(0.1)
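
A hypothetical way to tie this constructor to the shutdown method from Example #9, assuming the class is the IsacNode type that the IsacValue examples above receive as their node argument (the node name and URI below are made up):

node = IsacNode('demo-node')  # class and node name are assumptions
try:
    iv = IsacValue(node, 'test://demo/value',  # hypothetical URI
                   survey_last_value=False, survey_static_tags=False)
    iv.value = 42
    green.sleep(0.25)  # give the publication a moment to go out
finally:
    node.shutdown()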
Example #7
def wait_cb(results, cb_name, timeout=10, step=0.1):
    for i in range(int(timeout / step)):
        green.sleep(step)
        if getattr(results, cb_name + '_results'):
            break
    assert getattr(results,
                   cb_name + '_results'), 'Callback %s not called' % cb_name
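
A hypothetical usage of this polling helper, together with the node_with_callbacks fixture shown later under Example #13:

def test_peer_detected(node_with_callbacks):
    # Hypothetical test: poll until the on_new_peer callback has stored a
    # result, or fail with 'Callback on_new_peer not called' after ~10 s.
    n, results = node_with_callbacks
    wait_cb(results, 'on_new_peer')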
def test_precision_by_config_ms(config, clean_db, one_node):
    t_start = datetime.now()
    iv = IsacValue(one_node, 'test://test_precision/test_precision_by_config_ms/test', survey_last_value=False, survey_static_tags=False)

    try:
        config['config']['default_ts_precision'] = 'ms'
        archiver = arch.InfluxDBArchiver(config)

        expected_history = []
        def _save_point():
            value, ts, tags = iv.value_ts_tags
            ts = degrade_time(ts, precision='ms')
            expected_history.append((value, ts, tags))

        base_ts = datetime.now()
        while base_ts.microsecond > 500:
            green.sleep(0.0001)
            base_ts = datetime.now()

        iv.value_ts = randint(0, 100), base_ts
        iv.value_ts = randint(0, 100), base_ts + timedelta(microseconds=20)
        _save_point()
        iv.value_ts = randint(0, 100), base_ts + timedelta(microseconds=1000)
        _save_point()

        green.sleep(0.5)
        t_end = datetime.now()

        data = iv.get_history((t_start, t_end))
        assert data == expected_history

    finally:
        archiver.shutdown()
        config['config']['default_ts_precision'] = arch.DEFAULT_TS_PRECISION
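
The degrade_time helper used by these precision tests is defined elsewhere in the test suite and not shown here; a minimal sketch of what it might look like, assuming it simply truncates a datetime to the requested precision (the 's' default is a guess):

def degrade_time(ts, precision='s'):
    # Hypothetical sketch: truncate a datetime to the given timestamp
    # precision ('s' keeps whole seconds, 'ms' keeps whole milliseconds).
    if precision == 's':
        return ts.replace(microsecond=0)
    if precision == 'ms':
        return ts.replace(microsecond=(ts.microsecond // 1000) * 1000)
    return ts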
Example #9
    def shutdown(self):
        logger.debug('Shutting down transport')
        self.transport.shutdown()

        self.rpc.shutdown()
        self.pub_sub.shutdown()

        green.sleep(0.1)
Example #10
def serve_forever(self):
    self.start()
    try:
        while True:
            green.sleep(1)
    except (KeyboardInterrupt, SystemExit):
        logger.info('Stopping')
        self.stop()
def test_precision_by_metadata(config, clean_db, one_node):
    t_start = datetime.now()
    iv_s = IsacValue(one_node, 'test://test_precision/test_precision_by_metadata/test_s', metadata={'ts_precision': 's'}, survey_last_value=False, survey_static_tags=False)
    iv_ms = IsacValue(one_node, 'test://test_precision/test_precision_by_metadata/test_ms', metadata={'ts_precision': 'ms'}, survey_last_value=False, survey_static_tags=False)
    iv_u = IsacValue(one_node, 'test://test_precision/test_precision_by_metadata/test_u', metadata={'ts_precision': 'u'}, survey_last_value=False, survey_static_tags=False)

    try:
        archiver = arch.InfluxDBArchiver(config)

        iv_s_expected_history = []
        iv_ms_expected_history = []
        iv_u_expected_history = []
        def _save_point(iv, expected_history, precision):
            value, ts, tags = iv.value_ts_tags
            if precision in ['s', 'ms']:
                ts = degrade_time(ts, precision)
            expected_history.append((value, ts, tags))

        base_ts = datetime.now()
        while base_ts.microsecond > 500:
            green.sleep(0.0001)
            base_ts = datetime.now()

        # Second
        iv_s.value_ts = randint(0, 100), base_ts
        iv_s.value_ts = randint(0, 100), base_ts + timedelta(microseconds=20000)
        _save_point(iv_s, iv_s_expected_history, 's')
        iv_s.value_ts = randint(0, 100), base_ts + timedelta(seconds=1)
        _save_point(iv_s, iv_s_expected_history, 's')

        # Millisecond
        iv_ms.value_ts = randint(0, 100), base_ts
        iv_ms.value_ts = randint(0, 100), base_ts + timedelta(microseconds=20)
        _save_point(iv_ms, iv_ms_expected_history, 'ms')
        iv_ms.value_ts = randint(0, 100), base_ts + timedelta(microseconds=1000)
        _save_point(iv_ms, iv_ms_expected_history, 'ms')

        # Microsecond
        iv_u.value_ts = randint(0, 100), base_ts
        _save_point(iv_u, iv_u_expected_history, 'u')  # When the microsecond timestamps are identical, the first value is kept, not the last one as with other precisions
        iv_u.value_ts = randint(0, 100), base_ts
        iv_u.value_ts = randint(0, 100), base_ts + timedelta(microseconds=1)
        _save_point(iv_u, iv_u_expected_history, 'u')

        green.sleep(0.5)
        t_end = datetime.now() + timedelta(seconds=10)

        data_s = iv_s.get_history((t_start, t_end))
        assert data_s == iv_s_expected_history

        data_ms = iv_ms.get_history((t_start, t_end))
        assert data_ms == iv_ms_expected_history

        data_u = iv_u.get_history((t_start, t_end))
        assert data_u == iv_u_expected_history

    finally:
        archiver.shutdown()
def test_smoothing_by_metadata(config, clean_db, one_node):
    t_start = datetime.now()
    ivS = IsacValue(one_node, 'test://test_smoothing/test_smoothing_by_metadata/test_smoothing', metadata={'smoothing': True}, survey_last_value=False, survey_static_tags=False)
    ivNS = IsacValue(one_node, 'test://test_smoothing/test_smoothing_by_metadata/test_no_smoothing', survey_last_value=False, survey_static_tags=False)
    ivSF = IsacValue(one_node, 'test://test_smoothing/test_smoothing_by_metadata/test_smoothing_false', metadata={'smoothing': False}, survey_last_value=False, survey_static_tags=False)

    try:
        archiver = arch.InfluxDBArchiver(config)


        ivS_expected_history = []
        ivNS_expected_history = []
        ivSF_expected_history = []
        def _save_point(iv, record):
            value, ts, tags = iv.value_ts_tags
            ts = degrade_time(ts)
            record.append((value, ts, tags))

        def _save_all_points():
            _save_point(ivS, ivS_expected_history)
            _save_point(ivNS, ivNS_expected_history)
            _save_point(ivSF, ivSF_expected_history)

        def _save_not_smoothed_points():
            _save_point(ivNS, ivNS_expected_history)
            _save_point(ivSF, ivSF_expected_history)


        base = randint(0, 100)
        ivS.value = ivNS.value = ivSF.value = base
        _save_all_points()
        ivS.value = ivNS.value = ivSF.value = ivS.value + 10
        _save_all_points()
        ivS.value = ivNS.value = ivSF.value = ivS.value
        _save_not_smoothed_points()
        ivS.value = ivNS.value = ivSF.value = ivS.value
        _save_not_smoothed_points()
        ivS.value = ivNS.value = ivSF.value = ivS.value
        _save_not_smoothed_points()
        ivS.value = ivNS.value = ivSF.value = ivS.value
        _save_all_points()
        ivS.value = ivNS.value = ivSF.value = ivS.value + 10
        _save_all_points()

        green.sleep(0.5)
        t_end = datetime.now()

        data_ivS = ivS.get_history((t_start, t_end))
        assert data_ivS == ivS_expected_history

        data_ivNS = ivNS.get_history((t_start, t_end))
        assert data_ivNS == ivNS_expected_history

        data_ivSF = ivSF.get_history((t_start, t_end))
        assert data_ivSF == ivSF_expected_history

    finally:
        archiver.shutdown()
Example #13
def node_with_callbacks(request):
    n = PyreNode('testA')

    results = StoreResults()
    n.on_new_peer = results.on_new_peer
    n.on_peer_gone = results.on_peer_gone
    n.on_survey = results.on_survey
    n.on_event = results.on_event

    yield n, results

    n.shutdown()
    green.sleep(0.1)  # Give the other nodes time to finish shutting down
Example #14
def test_observer_at_creation(two_nodes):
    nA, nB = two_nodes
    obs = Observer()

    uri = 'test://test_isac_value/test_observer_at_creation/test_observer'
    ivA = IsacValue(nA, uri, randint(0, 100), static_tags={'this': 'is', 'static': 'tags'}, survey_last_value=False, survey_static_tags=False)
    ivB = IsacValue(nB, uri, observers=Observable([obs.observer]))
    green.sleep(0.5)
    assert obs.args, 'Callback not received'
    iv_recv, value, ts, tags = obs.args
    assert iv_recv == ivB
    assert value == ivA.value
    assert ts == ivA.timestamp
    assert tags == ivA.tags
    assert obs.static_tags == ivA.static_tags
def test_update_metadata(config, root_client, clean_db, one_node):
    iv = IsacValue(one_node, 'test://test_update/test_update_metadata/test', survey_last_value=False, survey_static_tags=False)

    try:
        archiver = arch.InfluxDBArchiver(config)

        iv.metadata = {'meta': 'data'}
        green.sleep(0.25)
        stored_values = read_data(config, root_client, 'SELECT * FROM /.*/ GROUP BY authority, path ORDER BY time DESC LIMIT 1')
        for uri, points in stored_values.items():
            uri = uri.replace('metadata', points[0]['scheme'], 1)
            assert uri == iv.uri
            assert points[0]['meta'] == iv.metadata['meta']

    finally:
        archiver.shutdown()
Example #16
def test_value_metadata_update_event(two_nodes):  # noqa: F811
    nA, nB = two_nodes
    fake_iv = FakeIsacValue()
    nB.transport.join_event()

    green.sleep(0.25)

    uri = 'test://test_event_value/test_value_metadata_update_event/test'
    nB.isac_values[uri] = fake_iv
    nA.event_value_metadata_update(uri, {'this is': 'metadata'},
                                   nA.name_uuid())

    green.sleep(0.25)

    assert fake_iv.metadata, 'Callback not called'
    assert fake_iv.metadata == {'this is': 'metadata'}
    assert fake_iv.source_peer['peer_name'] == nA.name
    assert fake_iv.source_peer['peer_uuid'] == str(nA.transport.uuid())
Example #17
def test_detect_all_iv(config, root_client, clean_db, two_nodes):
    nA, nB = two_nodes
    db = config['archiver-user']['db']

    ivs = {}
    def _make_iv(node, uri):
        ivs[uri] = IsacValue(node, uri, static_tags={'nb': uri[-1]}, metadata={'leaf': uri[-4:]}, survey_last_value=False, survey_static_tags=False)
        ivs[uri].value = randint(0, 100)
    _make_iv(nA, 'test://test_init/test_detect_all_iv/iv1A')
    _make_iv(nA, 'test://test_init/test_detect_all_iv/iv2A')
    _make_iv(nB, 'test://test_init/test_detect_all_iv/iv1B')
    _make_iv(nB, 'test://test_init/test_detect_all_iv/iv2B')

    try:
        archiver = arch.InfluxDBArchiver(config)
        green.sleep(0.25)
        stored_values = read_data(config, root_client, 'SELECT * FROM /.*/ GROUP BY authority, path ORDER BY time DESC LIMIT 1')
        checked = []
        checked_metadata = []
        for uri, points in stored_values.items():
            if uri.startswith('metadata'):
                uri = uri.replace('metadata', 'test', 1)
                assert uri in ivs
                assert points[0]['leaf'] == ivs[uri].metadata['leaf']
                assert points[0]['s_nb'] == ivs[uri].static_tags['nb']
                assert points[0]['d_peer_name'] == ivs[uri].isac_node.transport.name()
                assert points[0]['d_peer_uuid'] == str(ivs[uri].isac_node.transport.uuid())
                checked_metadata.append(uri)

            else:
                assert uri in ivs
                assert points[0]['value_int'] == ivs[uri].value
                compare_time(points[0]['time'], ivs[uri].timestamp)
                assert points[0]['s_nb'] == ivs[uri].static_tags['nb']
                assert points[0]['d_peer_name'] == ivs[uri].isac_node.transport.name()
                assert points[0]['d_peer_uuid'] == str(ivs[uri].isac_node.transport.uuid())
                checked.append(uri)

        assert sorted(checked) == sorted(ivs.keys())
        assert sorted(checked_metadata) == sorted(ivs.keys())
    finally:
        archiver.shutdown()
Example #18
def test_call():
    obs = Observable()
    o1 = Observer()
    o2 = Observer()

    obs += o1.observer
    obs += o2.observer

    obs('arg1', 'arg2', arg3=3, arg4=4)
    green.sleep(0.1)
    assert o1.args == ('arg1', 'arg2')
    assert o1.kwargs == {'arg3': 3, 'arg4': 4}
    assert o2.args == ('arg1', 'arg2')
    assert o2.kwargs == {'arg3': 3, 'arg4': 4}

    obs()
    green.sleep(0.1)
    assert o1.args == ()
    assert o1.kwargs == {}
    assert o2.args == ()
    assert o2.kwargs == {}
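
The Observer and Observable objects exercised here come from the library and its test helpers; a minimal, hypothetical sketch of the Observer test double, assuming it only remembers the arguments of the most recent callback (the static_tags attribute seen in other examples is omitted):

class Observer(object):
    # Hypothetical test double: record the arguments of the last callback call.
    def __init__(self):
        self.args = None
        self.kwargs = None

    def observer(self, *args, **kwargs):
        self.args = args
        self.kwargs = kwargs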
def test_update_new_value(config, root_client, clean_db, one_node):
    try:
        archiver = arch.InfluxDBArchiver(config)

        iv = IsacValue(
            one_node, 'test://test_update/test_update_new_value/test', randint(0, 100),
            static_tags={'static': 'tags'}, dynamic_tags={'test': str(randint(0, 100))},
            metadata={'meta': 'data'}, survey_last_value=False, survey_static_tags=False
        )

        green.sleep(0.5)
        stored_values = read_data(config, root_client, 'SELECT * FROM /.*/ GROUP BY authority, path ORDER BY time DESC LIMIT 1')
        checked = False
        checked_metadata = False
        for uri, points in stored_values.items():
            if uri.startswith('metadata'):
                uri = uri.replace('metadata', 'test', 1)
                assert uri == iv.uri
                assert points[0]['meta'] == iv.metadata['meta']
                assert points[0]['s_static'] == iv.static_tags['static']
                assert points[0]['d_peer_name'] == iv.isac_node.transport.name()
                assert points[0]['d_peer_uuid'] == str(iv.isac_node.transport.uuid())
                checked_metadata = True

            else:
                assert uri == iv.uri
                assert points[0]['value_int'] == iv.value
                compare_time(points[0]['time'], iv.timestamp)
                assert points[0]['s_static'] == iv.static_tags['static']
                assert points[0]['d_peer_name'] == iv.isac_node.transport.name()
                assert points[0]['d_peer_uuid'] == str(iv.isac_node.transport.uuid())
                assert points[0]['d_test'] == iv.tags['test']
                checked = True

        assert checked, 'Could not read record for value update'
        assert checked_metadata, 'Could not read record for metadata update'

    finally:
        archiver.shutdown()
Example #20
def test_observer_at_creation(two_nodes):  # noqa: F811
    nA, nB = two_nodes
    obs = Observer()

    uri = 'test://test_isac_value/test_observer_at_creation/test_observer'
    ivA = IsacValue(nA,
                    uri,
                    randint(0, 100),
                    static_tags={
                        'this': 'is',
                        'static': 'tags'
                    },
                    survey_last_value=False,
                    survey_static_tags=False)
    ivB = IsacValue(nB, uri, observers=Observable([obs.observer]))
    green.sleep(0.5)
    assert obs.args, 'Callback not received'
    iv_recv, value, ts, tags = obs.args
    assert iv_recv == ivB
    assert value == ivA.value
    assert ts == ivA.timestamp
    assert tags == ivA.tags
    assert obs.static_tags == ivA.static_tags
def test_get_history(config, clean_db, two_nodes):
    nA, nB = two_nodes
    t_start = datetime.now()

    uri = 'test://test_history/test_get_history/test'
    ivA = IsacValue(nA, uri, survey_last_value=False, survey_static_tags=False)

    try:
        archiver = arch.InfluxDBArchiver(config)

        our_history = []
        for i in range(10):
            ivA.value = randint(0, 100)
            value, ts, tags = ivA.value_ts_tags
            ts = degrade_time(ts)
            our_history.append((value, ts, tags))
        green.sleep(0.5)
        t_end = datetime.now()

        ivB = IsacValue(nB, uri, survey_last_value=False, survey_static_tags=False)
        data = ivB.get_history((t_start, t_end))
        assert data == our_history
    finally:
        archiver.shutdown()
def test_smoothing_by_config(config, clean_db, one_node):
    t_start = datetime.now()
    iv = IsacValue(one_node, 'test://test_smoothing/test_smoothing_by_config/test', survey_last_value=False, survey_static_tags=False)

    try:
        config['config']['default_smoothing'] = True
        archiver = arch.InfluxDBArchiver(config)

        expected_history = []
        def _save_point():
            value, ts, tags = iv.value_ts_tags
            ts = degrade_time(ts)
            expected_history.append((value, ts, tags))

        base = randint(0, 100)
        iv.value = base
        _save_point()
        iv.value += 10
        _save_point()
        iv.value = iv.value
        iv.value = iv.value
        iv.value = iv.value
        iv.value = iv.value
        _save_point()
        iv.value += 10
        _save_point()

        green.sleep(0.5)
        t_end = datetime.now()

        data = iv.get_history((t_start, t_end))
        assert data == expected_history

    finally:
        archiver.shutdown()
        config['config']['default_smoothing'] = arch.DEFAULT_SMOOTHING
Example #23
def test_isac_value_entering_event(two_nodes):  # noqa: F811
    nA, nB = two_nodes
    obs = Observer()

    nB.register_isac_value_entering(obs.callback)
    green.sleep(0.25)

    uri = 'test://test_event_value/test_isac_value_entering_event/test'
    nA.event_isac_value_entering(uri)

    green.sleep(0.25)

    assert obs.args, 'Callback not called'
    assert obs.args == ('testA', uri)

    nB.unregister_isac_value_entering(obs.callback)
    nA.event_isac_value_entering(uri + '2')

    green.sleep(0.25)

    assert obs.args == ('testA', uri)
def test_buffer_on_db_deleted(config, root_client, clean_db, one_node):
    db = config['archiver-user']['db']
    buffer_path = config['buffer']['path']

    iv = IsacValue(one_node, 'test://test_buffer/test_buffer_on_db_deleted/test', survey_last_value=False, survey_static_tags=False)
    try:
        archiver = arch.InfluxDBArchiver(config)

        base = randint(0, 100)
        iv.value = base
        green.sleep(0.25)

        root_client.drop_database(db)

        iv.value += 10
        iv.value += 10
        green.sleep(0.25)

        assert os.path.exists(buffer_path)
        with open(buffer_path, 'rb') as buffer_r:  # pickle files must be read in binary mode
            assert len(pickle.load(buffer_r)) == 2

        archiver._create_db()
        iv.value += 10
        green.sleep(0.5)

        assert not os.path.exists(buffer_path)
        stored_values = read_data(config, root_client, 'SELECT * FROM /.*/ GROUP BY authority, path ORDER BY time ASC')
        got_values = []
        for uri, points in stored_values.items():
            assert uri == iv.uri
            for point in points:
                got_values.append(point['value_int'])

        expected_values = [i for i in range(base+10, base+40, 10)]
        assert got_values == expected_values

    finally:
        archiver.shutdown()
Example #25
pub = ctx.socket(zmq.PUB)
pub.setsockopt(zmq.IPV6, 1)
pub.connect('tcp://[aaaa::600:fbff:a2df:5d20]:8888')

time.sleep(1)

sub = ctx.socket(zmq.SUB)
sub.setsockopt(zmq.IPV6, 1)
sub.setsockopt(zmq.SUBSCRIBE, b'')  # subscribe to every topic
sub.connect('tcp://[aaaa::600:fbff:a2df:5d20]:9999')

def read_sub():
    # Messages are '\0'-separated: the first field is the value name,
    # the remaining fields (if any) are the new value.
    while True:
        data = sub.recv().decode().split('\0')
        print('< ', data)
        if data[0] not in values:
            make_value(data[0])

        if len(data) > 1:
            values[data[0]].value = data[1:]
        else:
            values[data[0]].value = 1

green.spawn(read_sub)

try:
    n.serve_forever()
except KeyboardInterrupt:
    n.shutdown()
    green.sleep(1)
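
For reference, a hypothetical sender that the read_sub loop above would understand, using the same '\0'-separated wire format (a bare name creates the value with 1, name plus fields sets it):

def publish_update(sock, name, *fields):
    # Hypothetical helper: send "<name>" or "<name>\0<field>\0..." so the
    # subscriber above either creates the value or updates it with the fields.
    parts = [name] + [str(f) for f in fields]
    sock.send('\0'.join(parts).encode())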
Example #26
def stop(self):
    self.isac_node.shutdown()
    green.sleep(2)
Example #27
def timestamp(self):
    green.sleep(0.001)
    return self._timestamp
Example #28
def wait_cb(results, cb_name, timeout=10, step=0.1):
    for i in range(int(timeout/step)):
        green.sleep(step)
        if getattr(results, cb_name + '_results'):
            break
    assert getattr(results, cb_name + '_results'), 'Callback %s not called' % cb_name
Example #29
def teardown():
    n.shutdown()
    green.sleep(0.1)  # Give the other nodes time to finish shutting down
Example #30
def stop(self):
    self.running = False
    self.isac_node.shutdown()
    green.sleep(2)
Example #31
def timestamp_float(self):
    green.sleep(0.001)
    return time.mktime(self._timestamp.timetuple()) + (self._timestamp.microsecond / 1000000.0)
Example #32
def value_ts(self):
    green.sleep(0.001)
    return self._value, self._timestamp
Example #33
def value(self):
    green.sleep(0.001)
    return self._value
Example #34
def value_ts_tags(self):
    green.sleep(0.001)
    return self._value, self._timestamp, self._dynamic_tags
Example #35
def tags(self):
    green.sleep(0.001)
    return self._dynamic_tags