    def __init__(self, device, isac_node):
        self.isac_node = isac_node

        self.signals = {}

        # OpenZWave delivers its notifications from a native thread; they are
        # queued here and handled from a green thread by _notif_reader.
        self._ozw_notif_queue = Queue()
        self._running = True
        green.spawn(self._notif_reader)

        self.options = ZWaveOption(
            device,
            config_path='/usr/share/openzwave/config',
            user_path='./user-dir',
            cmd_line=''
        )
        self.options.set_log_file("./user-dir/OZW_Log.log")
        self.options.set_append_log_file(False)
        self.options.set_console_output(False)
        self.options.set_save_log_level('Info')
        self.options.set_logging(False)
        self.options.lock()

        self.network = ZWaveNetwork(self.options, log=None)

        notif_to_func = [
            (ZWaveNetwork.SIGNAL_NETWORK_STARTED,    self.notif_network_started),
            (ZWaveNetwork.SIGNAL_NETWORK_RESETTED,   self.notif_network_resetted),
            (ZWaveNetwork.SIGNAL_NETWORK_READY,      self.notif_network_ready),
            (ZWaveNetwork.SIGNAL_NODE_ADDED,         self.notif_node_added),
            (ZWaveNetwork.SIGNAL_NODE_NAMING,        self.notif_node_named),
            (ZWaveNetwork.SIGNAL_NODE_REMOVED,       self.notif_node_removed),
            (ZWaveNetwork.SIGNAL_VALUE_ADDED,        self.notif_value_added),
            (ZWaveNetwork.SIGNAL_VALUE_CHANGED,      self.notif_value_update),
            (ZWaveNetwork.SIGNAL_VALUE_REMOVED,      self.notif_value_removed),
            (ZWaveNetwork.SIGNAL_CONTROLLER_COMMAND, self.notif_ctrl_message),
            (ZWaveNetwork.SIGNAL_CONTROLLER_WAITING, self.notif_ctrl_message),
        ]
        for notif, func in notif_to_func:
            dispatcher.connect(self._notif_wrapper(func), notif, weak=False)

        # dispatcher.connect(self._notif_wrapper_all, All)

        self.isac_node.add_rpc(self.network_heal)
        self.isac_node.add_rpc(self.controller_add_node, name='add_node')
        self.isac_node.add_rpc(self.controller_remove_node, name='remove_node')
        self.isac_node.add_rpc(self.controller_cancel_command, name='cancel_command')
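
Neither _notif_wrapper nor _notif_reader is part of this excerpt. A minimal sketch of what they could look like, assuming the wrapper only has to hand each dispatcher callback over to the green thread through _ozw_notif_queue:

    def _notif_wrapper(self, func):
        # Sketch: the dispatcher calls this wrapper from OpenZWave's own
        # thread, so it only enqueues the handler and its arguments.
        def wrapper(*args, **kwargs):
            self._ozw_notif_queue.put((func, args, kwargs))
        return wrapper

    def _notif_reader(self):
        # Sketch: drain the queue from the greenlet spawned in __init__ and
        # run the handlers there.
        while self._running:
            func, args, kwargs = self._ozw_notif_queue.get()
            func(*args, **kwargs)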
Example #2
def test_survey_process(node_with_callbacks):
    nA, results = node_with_callbacks
    nA.run()

    nB = PyreNode('testB')
    try:
        nB.run()
        wait_cb(results, 'on_new_peer')

        # Test it can cope with wrong req_id
        nA.reply_survey(nB.uuid(), {'req_id': 'wrong!', 'data': ''})

        def _reply():
            wait_cb(results, 'on_survey')
            peer_id, peer_name, request = results.on_survey_results
            nA.reply_survey(peer_id, {'req_id': 'this is a test', 'data': 'OK!'})
        green.spawn(_reply)

        assert nB.send_survey({'req_id': 'this is a test'}, 1, 1) == [('testA', 'OK!')]
    finally:
        nB.shutdown()
Example #3
def test_survey_process(node_with_callbacks, request):
    nA, results = node_with_callbacks
    nA.run()

    nB = PyreNode('testB')
    request.addfinalizer(nB.shutdown)

    nB.run()
    wait_cb(results, 'on_new_peer')

    # Test it can cope with wrong req_id
    nA.reply_survey(nB.uuid(), {'req_id': 'wrong!', 'data': ''})

    def _reply():
        wait_cb(results, 'on_survey')
        peer_id, peer_name, request = results.on_survey_results
        nA.reply_survey(peer_id, {'req_id': 'this is a test', 'data': 'OK!'})

    green.spawn(_reply)

    assert nB.send_survey({'req_id': 'this is a test'}, 1,
                          1) == [(b'testA', 'OK!')]
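
Both tests above rely on a node_with_callbacks fixture and a wait_cb helper that are not shown in this excerpt. One possible shape for them, assuming the fixture wires the node's callbacks to a small recorder object and that wait_cb simply waits (cooperatively, via the same green helper used elsewhere in these examples) until the named callback has fired:

import time

class CallbackRecorder(object):
    # Hypothetical recorder: remembers which callbacks fired and stores their
    # arguments, e.g. results.on_survey_results == (peer_id, peer_name, request).
    def __init__(self):
        self.fired = set()

    def make(self, name):
        def _cb(*args):
            setattr(self, name + '_results', args)
            self.fired.add(name)
        return _cb

def wait_cb(results, name, timeout=5):
    # Hypothetical helper: poll until the named callback has been recorded.
    deadline = time.time() + timeout
    while name not in results.fired:
        if time.time() > deadline:
            raise AssertionError('callback %s never fired' % name)
        green.sleep(0.1)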
Example #4
import time

import zmq

# `values`, `make_value`, the isac node `n` and the `green` helper used below
# are defined in the rest of the script, which is not part of this excerpt.
ctx = zmq.Context()

pub = ctx.socket(zmq.PUB)
pub.setsockopt(zmq.IPV6, 1)
pub.connect('tcp://aaaa::600:fbff:a2df:5d20:8888')

time.sleep(1)

sub = ctx.socket(zmq.SUB)
sub.setsockopt(zmq.IPV6, 1)
sub.setsockopt(zmq.SUBSCRIBE, '')
sub.connect('tcp://aaaa::600:fbff:a2df:5d20:9999')

def read_sub():
    # Messages are '\0'-separated: the first field names the value, the
    # remaining fields (if any) carry its new content.
    while True:
        data = sub.recv().split('\0')
        print '< ', data
        if data[0] not in values:
            make_value(data[0])

        if len(data) > 1:
            values[data[0]].value = data[1:]
        else:
            values[data[0]].value = 1

green.spawn(read_sub)

try:
    n.serve_forever()
except KeyboardInterrupt:
    n.shutdown()
    green.sleep(1)
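
For context, values, make_value and the node n come from the part of the script that is not shown above. A rough reconstruction, assuming IsacValue takes the node and the value URI as its first two arguments (the same calling convention InfluxDBArchivedValue uses in the next example):

from isac import IsacNode, IsacValue  # assumed import path

n = IsacNode('zmq-isac-bridge')  # hypothetical node name; served by n.serve_forever() above
values = {}

def make_value(uri):
    # Create an isac value the first time a URI is seen on the SUB socket and
    # keep it so later messages can update values[uri].value.
    values[uri] = IsacValue(n, uri)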
Example #5
    def start(self):
        green.spawn(self.run)

    def __init__(self, config):
        logger.info('Starting')
        self.config = config

        buffer_path = self.config['buffer']['path']
        if not os.path.exists(os.path.dirname(buffer_path)):
            os.makedirs(os.path.dirname(buffer_path))

        dsn = InfluxDBArchiver.make_DSN(**self.config['archiver-user'])
        # Try to connect as the archiver user; on the first failure create the
        # missing user or database and retry once.
        for i in range(2):
            self._client = InfluxDBClient.from_DSN(dsn, password=self.config['archiver-user']['password'])
            try:
                self._client.query('SHOW MEASUREMENTS')
                break
            except InfluxDBClientError as ex:
                if ex.code == 401:
                    logger.error(ex)
                    logger.warning('Could not connect as user %s, trying as root to setup the DB', self.config['archiver-user']['username'])
                    self._create_user()
                elif ex.message.startswith('database not found'):
                    logger.warning('Could not find database %s, creating it', self.config['archiver-user']['db'])
                    self._create_db()
                else:
                    raise
        logger.info('Connected to DB with %s', dsn)

        self.isac_node = IsacNode('alidron-archiver-influxdb')
        green.signal(signal.SIGTERM, partial(self._sigterm_handler))
        green.signal(signal.SIGINT, partial(self._sigterm_handler))

        self.signals = {}

        # Fetch the most recent point of every measurement, grouped by the
        # (authority, path) tags that identify each signal URI.
        query = 'SELECT * FROM /.*/ GROUP BY authority, path ORDER BY time DESC LIMIT 1'
        logger.debug('Doing query: %s', query)
        raw_data = self._client.query(query)
        logger.debug('Raw data: %s', pf(raw_data.items()))
        metadata = {}

        def _make_uri(meas, tags):
            uri_str = uricompose(scheme=meas, authority=tags['authority'], path=tags['path'])
            return uri_str, urisplit(uri_str)

        # First pass: rebuild the metadata stored under the 'metadata' scheme.
        for meas_tags, fields in raw_data.items():
            uri_str, uri = _make_uri(*meas_tags)
            if uri.scheme == 'metadata':
                raw_metadata = fields.next()
                uri_str = uri_str.replace('metadata', raw_metadata['scheme'], 1)
                metadata[uri_str] = {}
                for key, value in raw_metadata.items():
                    if key.startswith('d_') or key.startswith('s_') or key in ['time', 'scheme']:
                        continue
                    if (key.startswith('value')) and (value is None):
                        continue

                    if key.startswith('json_'):
                        if value is None:
                            metadata[uri_str][key[len('json_'):]] = None
                        else:
                            try:
                                metadata[uri_str][key[len('json_'):]] = json.loads(str(value))
                            except ValueError:
                                logger.error('Wrong JSON for %s at key %s: %s', uri_str, key, str(value))
                                continue
                    else:
                        metadata[uri_str][key] = value

                logger.debug('Read metadata for %s: %s', uri_str, metadata[uri_str])

        # Second pass: recreate an archived value from the last known point of
        # every non-metadata measurement.
        for meas_tags, data in raw_data.items():
            uri_str, uri = _make_uri(*meas_tags)
            if uri.scheme == 'metadata':
                continue

            last_point = data.next()

            try:
                ts = datetime.strptime(last_point['time'], '%Y-%m-%dT%H:%M:%S.%fZ')
            except ValueError:
                ts = datetime.strptime(last_point['time'], '%Y-%m-%dT%H:%M:%SZ')

            static_tags = {}
            dynamic_tags = {}
            for k, v in last_point.items():
                if k.startswith('s_'):
                    static_tags[k[2:]] = v
                elif k.startswith('d_'):
                    dynamic_tags[k[2:]] = v

            logger.debug('For URI %s: %s, %s', uri_str, ts, pf(last_point))
            logger.debug('Decoded tags: %s, %s', static_tags, dynamic_tags)

            self.signals[uri_str] = InfluxDBArchivedValue(
                self.isac_node, uri_str,
                initial_value=(self._type_from_db_to_py(last_point), ts),
                static_tags=static_tags, dynamic_tags=dynamic_tags,
                observers=Observable([self._notify]),
                metadata=metadata.get(uri_str, None),
                survey_last_value=False,
                survey_static_tags=False,
                influxdb_client=self._client,
            )
            self.signals[uri_str].metadata_observers += self._notify_metadata
            green.spawn(self.signals[uri_str].survey_metadata)

            logger.warning('Discovered %s', uri_str)

        logger.warning('Done loading existing signals')

        self.isac_node.register_isac_value_entering(self._new_signal)
        signal_uris = self.isac_node.survey_value_uri('.*')
        map(partial(self._new_signal, ''), signal_uris)
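
_new_signal is registered just above but not included in this excerpt. Based on how existing signals are rebuilt earlier in __init__, a plausible sketch (the exact constructor arguments are an assumption):

    def _new_signal(self, peer_name, uri_str):
        # Sketch: called for every value URI entering the network (and once
        # per URI from the initial survey, with an empty peer name).
        if uri_str in self.signals:
            return

        self.signals[uri_str] = InfluxDBArchivedValue(
            self.isac_node, uri_str,
            observers=Observable([self._notify]),
            influxdb_client=self._client,
        )
        self.signals[uri_str].metadata_observers += self._notify_metadata
        green.spawn(self.signals[uri_str].survey_metadata)
        logger.info('Discovered new signal %s', uri_str)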