Example No. 1
def test_publish_all_last_values_tags_metadata(config, one_node):
    '''
    Full alive->dead->alive cycle:
    - Create a node
    - Create Isac Values
    - Create archiver (and archive current values)
    - Stop node/values
    - Stop archiver
    - Recreate archiver (load the last values + tags + metadata from the DB <<< what we want to test)
    - Recreate node
    - Recreate values
    - Finally, assert values, tags and metadata
    '''
    ivs = {}
    def _make_iv(uri):
        iv = IsacValue(
            one_node, uri,
            static_tags={'nb': uri[-1]},
            metadata={
                'leaf': uri[-3:],
                'list': list('123456789'),
                'dict': dict(zip(list('ABCDEFGHI'), list('123456789'))),
            },
            survey_last_value=False,
            survey_static_tags=False
        )
        iv.value = randint(0, 100)
        ivs[uri] = iv.value, iv.timestamp, iv.tags, iv.static_tags, iv.metadata
    _make_iv('test://test_init/test_publish_all_last_values_tags_metadata/iv1')
    _make_iv('test://test_init/test_publish_all_last_values_tags_metadata/iv2')

    try:
        archiver = arch.InfluxDBArchiver(config)

        one_node.shutdown()
        one_node = None
    finally:
        archiver.shutdown()

    try:
        archiver = arch.InfluxDBArchiver(config)
        try:
            one_node = IsacNode('test2')
            assert one_node.transport.peers() == [archiver.isac_node.transport.uuid()], 'Seems that too many nodes are still on the network'

            uris = one_node.survey_value_uri('.*')

            assert sorted(uris) == sorted(ivs.keys())

            for uri in ivs.keys():
                iv = IsacValue(one_node, uri)
                iv.survey_metadata()

                assert iv.value == ivs[uri][0]
                compare_time(iv.timestamp, ivs[uri][1])
                # print '>>>>>>', uri, iv.value, iv.timestamp, iv.tags, iv.static_tags
                # TODO: assert iv.tags == ivs[uri][2] # Original peer name/uuid gets squashed by IsacValue because we give it an initial value...
                assert iv.static_tags == ivs[uri][3]
                assert iv.metadata == ivs[uri][4]
        finally:
            one_node.shutdown()
    finally:
        archiver.shutdown()
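
The assertions above rely on a compare_time helper that is defined elsewhere in the test module. A minimal sketch of what it might look like, assuming it only needs to tolerate the small precision loss timestamps can suffer when round-tripping through the archiver (hypothetical signature and tolerance):

from datetime import timedelta

def compare_time(t1, t2, tolerance=timedelta(milliseconds=1)):
    # Hypothetical helper: the two datetimes should match within a small
    # tolerance, since the DB may store timestamps with reduced precision.
    assert abs(t1 - t2) <= tolerance, '%s != %s' % (t1, t2)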
Example No. 2
    def __init__(self):
        self.node = IsacNode('demo-zwave-react')
        green.signal(signal.SIGTERM, partial(self.sigterm_handler))

        self.action_value = IsacValue(self.node, 'zwave://0xdefbc93b.power_strip001/switch_binary/1/switch', survey_last_value=False, survey_static_tags=False)

        self.sensor_value = IsacValue(self.node, 'zwave://0xdefbc93b.13/alarm/access_control', survey_last_value=False, survey_static_tags=False)
        self.sensor_value.observers += self.value_update
Example No. 3
def test_creation_double():
    n1 = IsacNode('A')
    n2 = IsacNode('B')

    try:
        assert n1.transport.uuid() == n2.transport.peers()[0]
        assert n2.transport.uuid() == n1.transport.peers()[0]
    finally:
        n1.shutdown()
        n2.shutdown()
Example No. 4
    def __init__(self, bridges):
        self.isac_node = IsacNode('alidron-hue')
        green.signal(signal.SIGTERM, partial(self._sigterm_handler))
        green.signal(signal.SIGINT, partial(self._sigterm_handler))

        self.bridges = bridges
        self.signals = {}

        self.sync_signals()
Example No. 5
class DemoNode(object):

    def __init__(self):
        self.node = IsacNode('demo-zwave-react')
        green.signal(signal.SIGTERM, partial(self.sigterm_handler))

        self.action_value = IsacValue(self.node, 'zwave://0xdefbc93b.power_strip001/switch_binary/1/switch', survey_last_value=False, survey_static_tags=False)

        self.sensor_value = IsacValue(self.node, 'zwave://0xdefbc93b.13/alarm/access_control', survey_last_value=False, survey_static_tags=False)
        self.sensor_value.observers += self.value_update

    def value_update(self, iv, value, timestamp, tags):
        print 'Received update: ', value
        if value == 0x16: # Open
            self.action_value.value = True
        elif value == 0x17: # Close
            self.action_value.value = False

    def sigterm_handler(self):
        self.node.shutdown()
        sys.exit(0)
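
A hypothetical way to run this DemoNode, assuming it should block until interrupted; serve_forever, shutdown and green.sleep are the same IsacNode calls used in the other examples, while the demo variable name and the entry point itself are illustrative only:

if __name__ == '__main__':
    demo = DemoNode()
    try:
        # Block until SIGTERM (handled above) or Ctrl-C.
        demo.node.serve_forever()
    except KeyboardInterrupt:
        demo.node.shutdown()
        green.sleep(1)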
Example No. 6
    def __init__(self, port):
        self.isac_node = IsacNode('alidron-tic')
        green.signal(signal.SIGTERM, partial(self._sigterm_handler))
        green.signal(signal.SIGINT, partial(self._sigterm_handler))

        self.ser = serial.Serial(
            port=port,
            baudrate=1200,
            bytesize=serial.SEVENBITS,
            parity=serial.PARITY_EVEN,
            stopbits=serial.STOPBITS_ONE,
            timeout=1
        )

        self.signals = {}
Example No. 7
def test_creation_double():
    nA = IsacNode('A')
    nB = IsacNode('B')

    uri = 'test://test_isac_value/test_creation_double/my_value'
    try:
        ivA = IsacValue(nA,
                        uri,
                        survey_last_value=False,
                        survey_static_tags=False)
        ivB = IsacValue(nB,
                        uri,
                        survey_last_value=False,
                        survey_static_tags=False)
    finally:
        nA.shutdown()
        nB.shutdown()
Example No. 8
def test_creation_double():
    n1 = IsacNode('A')
    n2 = IsacNode('B')

    try:
        assert n1.transport.uuid() == n2.transport.peers()[0]
        assert n2.transport.uuid() == n1.transport.peers()[0]
    finally:
        n1.shutdown()
        n2.shutdown()
Example No. 9
def test_creation_double():
    nA = IsacNode('A')
    nB = IsacNode('B')

    uri = 'test://test_isac_value/test_creation_double/my_value'
    try:
        ivA = IsacValue(nA, uri, survey_last_value=False, survey_static_tags=False)
        ivB = IsacValue(nB, uri, survey_last_value=False, survey_static_tags=False)
    finally:
        nA.shutdown()
        nB.shutdown()
Example No. 10
    def __init__(self, config):
        logger.info('Starting')
        self.config = config

        buffer_path = self.config['buffer']['path']
        if not os.path.exists(os.path.dirname(buffer_path)):
            os.makedirs(os.path.dirname(buffer_path))

        dsn = InfluxDBArchiver.make_DSN(**self.config['archiver-user'])
        for i in range(2):
            self._client = InfluxDBClient.from_DSN(dsn, password=self.config['archiver-user']['password'])
            try:
                self._client.query('SHOW MEASUREMENTS')
                break
            except InfluxDBClientError as ex:
                if ex.code == 401:
                    logger.error(ex)
                    logger.warning('Could not connect as user %s, trying as root to setup the DB', self.config['archiver-user']['username'])
                    self._create_user()
                elif ex.message.startswith('database not found'):
                    logger.warning('Could not find database %s, creating it', self.config['archiver-user']['db'])
                    self._create_db()
                else:
                    raise
        logger.info('Connected to DB with %s', dsn)

        self.isac_node = IsacNode('alidron-archiver-influxdb')
        green.signal(signal.SIGTERM, partial(self._sigterm_handler))
        green.signal(signal.SIGINT, partial(self._sigterm_handler))

        self.signals = {}

        query = 'SELECT * FROM /.*/ GROUP BY authority, path ORDER BY time DESC LIMIT 1'
        logger.debug('Doing query: %s', query)
        raw_data = self._client.query(query)
        logger.debug('Raw data: %s', pf(raw_data.items()))
        metadata = {}

        def _make_uri(meas, tags):
            uri_str = uricompose(scheme=meas, authority=tags['authority'], path=tags['path'])
            return uri_str, urisplit(uri_str)

        for meas_tags, fields in raw_data.items():
            uri_str, uri = _make_uri(*meas_tags)
            if uri.scheme == 'metadata':
                raw_metadata = fields.next()
                uri_str = uri_str.replace('metadata', raw_metadata['scheme'], 1)
                metadata[uri_str] = {}
                for key, value in raw_metadata.items():
                    if key.startswith('d_') or key.startswith('s_') or key in ['time', 'scheme']:
                        continue
                    if (key.startswith('value')) and (value is None):
                        continue

                    if key.startswith('json_'):
                        if value is None:
                            metadata[uri_str][key[len('json_'):]] = None
                        else:
                            try:
                                metadata[uri_str][key[len('json_'):]] = json.loads(str(value))
                            except ValueError:
                                logger.error('Wrong JSON for %s at key %s: %s', uri_str, key, str(value))
                                continue
                    else:
                        metadata[uri_str][key] = value

                logger.debug('Read metadata for %s: %s', uri_str, metadata[uri_str])

        for meas_tags, data in raw_data.items():
            uri_str, uri = _make_uri(*meas_tags)
            if uri.scheme == 'metadata':
                continue

            last_point = data.next()

            try:
                ts = datetime.strptime(last_point['time'], '%Y-%m-%dT%H:%M:%S.%fZ')
            except ValueError:
                ts = datetime.strptime(last_point['time'], '%Y-%m-%dT%H:%M:%SZ')

            static_tags = {}
            dynamic_tags = {}
            for k, v in last_point.items():
                if k.startswith('s_'):
                    static_tags[k[2:]] = v
                elif k.startswith('d_'):
                    dynamic_tags[k[2:]] = v

            logger.debug('For URI %s: %s, %s', uri_str, ts, pf(last_point))
            logger.debug('Decoded tags: %s, %s', static_tags, dynamic_tags)

            self.signals[uri_str] = InfluxDBArchivedValue(
                self.isac_node, uri_str,
                initial_value=(self._type_from_db_to_py(last_point), ts),
                static_tags=static_tags, dynamic_tags=dynamic_tags,
                observers=Observable([self._notify]),
                metadata=metadata.get(uri_str, None),
                survey_last_value=False,
                survey_static_tags=False,
                influxdb_client=self._client,
            )
            self.signals[uri_str].metadata_observers += self._notify_metadata
            green.spawn(self.signals[uri_str].survey_metadata)

            logger.warning('Discovered %s', uri_str)

        logger.warning('Done loading existing signals')

        self.isac_node.register_isac_value_entering(self._new_signal)
        signal_uris = self.isac_node.survey_value_uri('.*')
        map(partial(self._new_signal, ''), signal_uris)
Example No. 11
def test_creation():
    n = IsacNode('test')
    n.shutdown()
Example No. 12
def test_creation():
    n = IsacNode('test')
    n.shutdown()
Example No. 13
        self.network.stop()
        logger.info('Stopped network')
        self.network.destroy()
        logger.info('Destroyed network')
        self.isac_node.shutdown()
        logger.info('Stopped ISAC node')


def sigterm_handler(alidron_ozw):
    logger.info('Received SIGTERM signal, exiting')
    alidron_ozw.shutdown()
    logger.info('Exiting')
    sys.exit(0)


if __name__ == '__main__':
    DEVICE = sys.argv[1]

    isac_node = IsacNode('alidron-openzwave-controller')

    alidron_ozw = AlidronOZW(DEVICE, isac_node)

    green.signal(signal.SIGTERM, partial(sigterm_handler, alidron_ozw))

    try:
        isac_node.serve_forever()
    except KeyboardInterrupt:
        alidron_ozw.shutdown()
        green.sleep(1)
Example No. 14
class AlidronHue(object):

    def __init__(self, bridges):
        self.isac_node = IsacNode('alidron-hue')
        green.signal(signal.SIGTERM, partial(self._sigterm_handler))
        green.signal(signal.SIGINT, partial(self._sigterm_handler))

        self.bridges = bridges
        self.signals = {}

        self.sync_signals()

    def sync_signals(self):
        for bridge in self.bridges.values():
            for light in bridge.get_light().values():
                light_obj = bridge[str(light['name'])]
                for prop in light['state']:
                    if prop == 'bri':
                        prop = 'brightness'
                    elif prop == 'sat':
                        prop = 'saturation'

                    uri = self.make_uri(light_obj, prop)
                    if uri not in self.signals:
                        self.make_value(uri, light_obj, prop)

    def make_uri(self, light_obj, prop):
        return 'hue://%s/%s' % (light_obj.name, str(prop))

    def make_value(self, uri, light_obj, prop):
        iv = IsacValue(
            self.isac_node, uri,
            initial_value=getattr(light_obj, prop),
            survey_last_value=False,
            survey_static_tags=False
        )
        iv.observers += self.value_update

        self.signals[uri] = {
            'isac_value': iv,
            'light_object': light_obj,
            'property': prop,
        }

        print '>>> Registered', uri

    def value_update(self, iv, value, timestamp, tags):
        print 'Received update for', iv.uri, ':', value, tags

        if 'transitiontime' in tags:
            old_trt = self.signals[iv.uri]['light_object'].transitiontime
            self.signals[iv.uri]['light_object'].transitiontime = tags['transitiontime']

        setattr(
            self.signals[iv.uri]['light_object'],
            self.signals[iv.uri]['property'],
            value
        )

        if 'transitiontime' in tags:
            self.signals[iv.uri]['light_object'].transitiontime = old_trt

    def serve_forever(self):
        try:
            self.isac_node.serve_forever()
        except (KeyboardInterrupt, SystemExit):
            logger.info('Stopping')
            return

    def stop(self):
        self.isac_node.shutdown()
        green.sleep(2)

    def _sigterm_handler(self):
        logger.info('Received SIGTERM signal, exiting')
        self.stop()
        logger.info('Exiting')
        sys.exit(0)
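
A hypothetical startup for this class, assuming bridges maps a label to a phue.Bridge-style object (the code above only relies on get_light(), item access by light name, and writable light attributes); the bridge address and dictionary key are placeholders:

from phue import Bridge

if __name__ == '__main__':
    bridges = {'home': Bridge('192.168.1.10')}  # hypothetical bridge address
    hue = AlidronHue(bridges)
    try:
        hue.serve_forever()
    finally:
        hue.stop()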
Example No. 15
        self.isac_node.shutdown()

    def do_EOF(self, args):
        self.do_stop(None)
        return -1

    def postloop(self):
        cmd.Cmd.postloop(self)   ## Clean up command completion
        print "Exiting..."

def ping(*args, **kwargs):
    return 'PONG!', args, kwargs


if __name__ == '__main__':
    isac_node = IsacNode(sys.argv[1])
    isac_node.add_rpc(ping)

    #val = IsacValue(isac_node, 'switch://dimer001/switch_binary/switch')
    #green.sleep(0.1)
    #val.value = not val.value
    #green.sleep(0.1)

    try:
        def notify_isac_value_entering(peer_name, value_name):
            print '>>>>', peer_name, value_name

        # if sys.argv[1] in ['test01', 'gdsjkl01']:
        isac_node.register_isac_value_entering(notify_isac_value_entering)

Example No. 16
class InfluxDBArchiver(object):

    @staticmethod
    def make_DSN(with_db=True, **kwargs):
        if with_db:
            return '{scheme}://{username}@{hostname}:{port}/{db}'.format(**kwargs)
        else:
            return '{scheme}://{username}@{hostname}:{port}'.format(**kwargs)

    _type_list_py_db = [
        (bool, 'boolean'),
        (str, 'string'),
        (unicode, 'string'),
        (int, 'int'),
        (float, 'real'),
    ]
    _types_from_py_to_db = dict(_type_list_py_db)
    _types_from_db_to_py = dict([(db, py) for py, db in _type_list_py_db])

    def __init__(self, config):
        logger.info('Starting')
        self.config = config

        buffer_path = self.config['buffer']['path']
        if not os.path.exists(os.path.dirname(buffer_path)):
            os.makedirs(os.path.dirname(buffer_path))

        dsn = InfluxDBArchiver.make_DSN(**self.config['archiver-user'])
        for i in range(2):
            self._client = InfluxDBClient.from_DSN(dsn, password=self.config['archiver-user']['password'])
            try:
                self._client.query('SHOW MEASUREMENTS')
                break
            except InfluxDBClientError as ex:
                if ex.code == 401:
                    logger.error(ex)
                    logger.warning('Could not connect as user %s, trying as root to setup the DB', self.config['archiver-user']['username'])
                    self._create_user()
                elif ex.message.startswith('database not found'):
                    logger.warning('Could not find database %s, creating it', self.config['archiver-user']['db'])
                    self._create_db()
                else:
                    raise
        logger.info('Connected to DB with %s', dsn)

        self.isac_node = IsacNode('alidron-archiver-influxdb')
        green.signal(signal.SIGTERM, partial(self._sigterm_handler))
        green.signal(signal.SIGINT, partial(self._sigterm_handler))

        self.signals = {}

        query = 'SELECT * FROM /.*/ GROUP BY authority, path ORDER BY time DESC LIMIT 1'
        logger.debug('Doing query: %s', query)
        raw_data = self._client.query(query)
        logger.debug('Raw data: %s', pf(raw_data.items()))
        metadata = {}

        def _make_uri(meas, tags):
            uri_str = uricompose(scheme=meas, authority=tags['authority'], path=tags['path'])
            return uri_str, urisplit(uri_str)

        for meas_tags, fields in raw_data.items():
            uri_str, uri = _make_uri(*meas_tags)
            if uri.scheme == 'metadata':
                raw_metadata = fields.next()
                uri_str = uri_str.replace('metadata', raw_metadata['scheme'], 1)
                metadata[uri_str] = {}
                for key, value in raw_metadata.items():
                    if key.startswith('d_') or key.startswith('s_') or key in ['time', 'scheme']:
                        continue
                    if (key.startswith('value')) and (value is None):
                        continue

                    if key.startswith('json_'):
                        if value is None:
                            metadata[uri_str][key[len('json_'):]] = None
                        else:
                            try:
                                metadata[uri_str][key[len('json_'):]] = json.loads(str(value))
                            except ValueError:
                                logger.error('Wrong JSON for %s at key %s: %s', uri_str, key, str(value))
                                continue
                    else:
                        metadata[uri_str][key] = value

                logger.debug('Read metadata for %s: %s', uri_str, metadata[uri_str])

        for meas_tags, data in raw_data.items():
            uri_str, uri = _make_uri(*meas_tags)
            if uri.scheme == 'metadata':
                continue

            last_point = data.next()

            try:
                ts = datetime.strptime(last_point['time'], '%Y-%m-%dT%H:%M:%S.%fZ')
            except ValueError:
                ts = datetime.strptime(last_point['time'], '%Y-%m-%dT%H:%M:%SZ')

            static_tags = {}
            dynamic_tags = {}
            for k, v in last_point.items():
                if k.startswith('s_'):
                    static_tags[k[2:]] = v
                elif k.startswith('d_'):
                    dynamic_tags[k[2:]] = v

            logger.debug('For URI %s: %s, %s', uri_str, ts, pf(last_point))
            logger.debug('Decoded tags: %s, %s', static_tags, dynamic_tags)

            self.signals[uri_str] = InfluxDBArchivedValue(
                self.isac_node, uri_str,
                initial_value=(self._type_from_db_to_py(last_point), ts),
                static_tags=static_tags, dynamic_tags=dynamic_tags,
                observers=Observable([self._notify]),
                metadata=metadata.get(uri_str, None),
                survey_last_value=False,
                survey_static_tags=False,
                influxdb_client=self._client,
            )
            self.signals[uri_str].metadata_observers += self._notify_metadata
            green.spawn(self.signals[uri_str].survey_metadata)

            logger.warning('Discovered %s', uri_str)

        logger.warning('Done loading existing signals')

        self.isac_node.register_isac_value_entering(self._new_signal)
        signal_uris = self.isac_node.survey_value_uri('.*')
        map(partial(self._new_signal, ''), signal_uris)

    def _create_user(self):
        dsn = InfluxDBArchiver.make_DSN(with_db=False, **self.config['admin-user'])
        root_client = InfluxDBClient.from_DSN(dsn, password=self.config['admin-user']['password'])

        root_client.create_user(self.config['archiver-user']['username'], self.config['archiver-user']['password'])
        root_client.grant_privilege('all', self.config['archiver-user']['db'], self.config['archiver-user']['username'])

        # dsn = InfluxDBArchiver.make_DSN(with_db=False, **self.config['archiver-user'])
        # self._client = InfluxDBClient.from_DSN(dsn, password=self.config['archiver-user']['password'])

    def _create_db(self):
        dsn = InfluxDBArchiver.make_DSN(with_db=False, **self.config['admin-user'])
        root_client = InfluxDBClient.from_DSN(dsn, password=self.config['admin-user']['password'])

        db = self.config['archiver-user']['db']
        root_client.create_database(db)
        root_client.alter_retention_policy('default', db, replication='3')

    def _new_signal(self, peer_name, signal_uri):
        signal_uri = signal_uri.encode()
        if signal_uri not in self.signals:
            logger.info('Signal %s will be archived', signal_uri)
            self.signals[signal_uri] = InfluxDBArchivedValue(self.isac_node, signal_uri, observers=Observable([self._notify]), influxdb_client=self._client)
            self.signals[signal_uri].metadata_observers += self._notify_metadata
            self.signals[signal_uri].survey_metadata()
            logger.debug('>>>>> static_tags %s: %s', signal_uri, self.signals[signal_uri].static_tags)

    @staticmethod
    def _prefix_keys(d, prefix):
        return {prefix+k: v for k, v in d.items()}

    @staticmethod
    def _type_from_py_to_db(value):
        if type(value) in InfluxDBArchiver._types_from_py_to_db.keys():
            field_name = 'value_' + InfluxDBArchiver._types_from_py_to_db[type(value)]
        else:
            field_name = 'value_json'
            value = json.dumps(value)

        return field_name, value

    @staticmethod
    def _type_from_db_to_py(fields):
        for field_name, value in fields.items():
            if not field_name.startswith('value_'):
                continue
            elif value is None:
                continue
            else:
                if field_name == 'value_json':
                    return json.loads(value)
                else:
                    return InfluxDBArchiver._types_from_db_to_py[field_name[len('value_'):]](value)

    def _notify(self, iv, value, ts, dynamic_tags):
        # We are already in a green thread here
        uri = urisplit(iv.uri)
        data = []

        def _make_data(value, ts, dynamic_tags):
            tags = self._prefix_keys(iv.static_tags, 's_')
            tags.update(self._prefix_keys(dynamic_tags, 'd_'))
            tags['authority'] = uri.authority
            tags['path'] = uri.path

            field_name, value = self._type_from_py_to_db(value)

            return {
                'measurement': uri.scheme,
                'time': ts,
                'fields': {field_name: value},
                'tags': tags,
            }

        # Handle smoothing
        default_smoothing = self.config.get('config', {}).get('default_smoothing', DEFAULT_SMOOTHING)
        smoothing = iv.metadata.get('smoothing', default_smoothing) if iv.metadata else default_smoothing
        logger.debug('Smoothing: %s', smoothing)
        if bool(smoothing):
            prev_value, prev_ts, prev_tags = getattr(iv, '_arch_prev_update', (None, datetime.fromtimestamp(0), {}))
            in_smoothing = getattr(iv, '_arch_in_smoothing', False)

            iv._arch_prev_update = (value, ts, dynamic_tags)
            if (prev_value == value) and (dynamic_tags == prev_tags):
                logger.debug('Smoothing detected same value and tags, not sending to DB')
                iv._arch_in_smoothing = True
                return
            elif in_smoothing:
                # Flush last same value to provide an end time for the smoothed out period
                logger.debug('Smoothing detected a different value than the one smoothed before. Flushing last same value')
                data.append(_make_data(prev_value, prev_ts, prev_tags))
                iv._arch_in_smoothing = False
            else:
                logger.debug('Smoothing detected normal value change: %s, %s, %s / %s, %s, %s', prev_value, prev_ts, prev_tags, value, ts, dynamic_tags)

        data.append(_make_data(value, ts, dynamic_tags))

        precision = self.config.get('config', {}).get('default_ts_precision', DEFAULT_TS_PRECISION)
        if iv.metadata and 'ts_precision' in iv.metadata:
            precision = iv.metadata['ts_precision']

        logger.info('Writing for %s: %s', uri, data)

        self._write_data(data, precision)

    def _notify_metadata(self, iv, metadata, source_peer):
        # We are already in a green thread here
        if not isinstance(metadata, dict):
            metadata = {'metadata': metadata}

        uri = urisplit(iv.uri)

        tags = self._prefix_keys(iv.static_tags, 's_')
        tags.update(self._prefix_keys(source_peer, 'd_'))
        tags['authority'] = uri.authority
        tags['path'] = uri.path
        tags['scheme'] = uri.scheme

        metadata_to_write = {}
        for k, v in metadata.items():
            if type(v) not in InfluxDBArchiver._types_from_py_to_db.keys():
                metadata_to_write['json_' + k] = json.dumps(v)
            else:
                metadata_to_write[k] = v

        data = [{
            'measurement': 'metadata',
            'fields': metadata_to_write,
            'tags': tags
        }]
        logger.info('Writing metadata for %s: %s', uri, metadata)

        self._write_data(data)

    def _write_data(self, data, precision='ms'):
        previous_data = []
        if os.path.exists(self.config['buffer']['path']):
            with open(self.config['buffer']['path'], 'rb') as buffer_r:
                previous_data += pickle.load(buffer_r)
            logger.info('Read %d records from buffer', len(previous_data))

        new_data = previous_data + data
        try:
            self._client.write_points(new_data, time_precision=precision)
        except (ConnectionError, InfluxDBClientError) as ex:
            logger.error('Failed to write to DB, flushing to buffer: %s', ex)

            with open(self.config['buffer']['path'], 'wb') as buffer_w:
                pickle.dump(previous_data + data, buffer_w, -1)

            logger.info('%d records in buffer', len(new_data))

            return

        logger.info('Flushed %d records to DB', len(new_data))

        # Write succeeded, clear buffer
        if os.path.exists(self.config['buffer']['path']):
            os.remove(self.config['buffer']['path'])

    def shutdown(self):
        logger.info('Stopping')
        self._running = False
        self.isac_node.shutdown()

    def _sigterm_handler(self):
        logger.info('Received SIGTERM signal, exiting')
        self.shutdown()
        logger.info('Exiting')
        sys.exit(0)

    def serve_forever(self):
        self._running = True
        try:
            while self._running:
                green.sleep(1)
        except (KeyboardInterrupt, SystemExit):
            self.shutdown()
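
A hypothetical launch of the archiver, assuming a config dict shaped after the keys __init__ reads above ('archiver-user', 'admin-user', 'buffer' and the optional 'config' section); all hostnames, credentials and paths are placeholders:

config = {
    'archiver-user': {
        'scheme': 'influxdb', 'username': 'archiver', 'password': 'secret',
        'hostname': 'localhost', 'port': 8086, 'db': 'alidron',
    },
    'admin-user': {
        'scheme': 'influxdb', 'username': 'root', 'password': 'root',
        'hostname': 'localhost', 'port': 8086,
    },
    'buffer': {'path': '/var/lib/alidron-archiver/buffer.pkl'},
    'config': {'default_smoothing': True, 'default_ts_precision': 'ms'},
}

archiver = InfluxDBArchiver(config)
archiver.serve_forever()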
Example No. 17
class AlidronTIC(object):

    ALLOWED_TAGS = ['ADCO', 'BASE', 'IINST', 'IMAX', 'ISOUSC', 'OPTARIF', 'PAPP', 'PTEC']

    def __init__(self, port):
        self.isac_node = IsacNode('alidron-tic')
        green.signal(signal.SIGTERM, partial(self._sigterm_handler))
        green.signal(signal.SIGINT, partial(self._sigterm_handler))

        self.ser = serial.Serial(
            port=port,
            baudrate=1200,
            bytesize=serial.SEVENBITS,
            parity=serial.PARITY_EVEN,
            stopbits=serial.STOPBITS_ONE,
            timeout=1
        )

        self.signals = {}

    def start(self):
        green.spawn(self.run)

    def serve_forever(self):
        self.start()
        try:
            while True:
                green.sleep(1)
        except (KeyboardInterrupt, SystemExit):
            logger.info('Stopping')
            self.stop()

    def stop(self):
        self.running = False
        self.isac_node.shutdown()
        green.sleep(2)

    def _sigterm_handler(self):
        logger.info('Received SIGTERM signal, exiting')
        self.stop()
        logger.info('Exiting')
        sys.exit(0)

    def run(self):
        self.running = True
        while self.running:
            try:
                try:
                    tag, data = _read_trame(self.ser.readline())
                except BadChecksum:
                    continue

                logger.debug('Read %s: %s', tag, data)
                
                if tag not in self.ALLOWED_TAGS:
                    logger.warning('Discarding %s: %s', tag, data)
                    continue
                
                try:
                    signal = self.signals[tag]
                except KeyError:
                    logger.info('Creating ISAC value for %s', tag)
                    metadata = {
                        'ts_precision': 's',
                        'smoothing': True
                    }
                    signal = IsacValue(
                        self.isac_node,
                        'tic://alidron-tic/%s' % tag,
                        static_tags={'location': 'entrance.switchboard'},
                        metadata=metadata,
                        survey_last_value=False,
                        survey_static_tags=False
                    )
                    self.signals[tag] = signal

                try:
                    signal.value = int(data)
                except ValueError:
                    signal.value = data

            except Exception as ex:
                logger.error('Hum, something weird: %s', ex)
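
A hypothetical entry point for this reader, assuming the serial port path is passed on the command line as in the other __main__ blocks shown here:

if __name__ == '__main__':
    tic = AlidronTIC(sys.argv[1])  # e.g. a /dev/ttyUSB* device (hypothetical)
    tic.serve_forever()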
Example No. 18
def _one_node():
    n = IsacNode('test')
    yield n
    n.shutdown()
Example No. 19
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.

from isac import IsacNode, IsacValue

import signal
import sys
import time
from functools import partial
from isac.tools import zmq, green

# ISAC part

n = IsacNode('demo')
values = {}

def isac_update(iv, value, ts, tags):
    print '>>', iv.uri, value, ts, tags
    if value is not None:
        pub.send_multipart([iv.uri, value])
    else:
        pub.send(iv.uri)
    
def make_value(uri):
    values[uri] = IsacValue(n, uri, survey_last_value=False, survey_static_tags=False)
    values[uri].observers += isac_update
    
make_value('action://nucleo-sensor-demo/led/blue/toggle')
make_value('action://nucleo-sensor-demo/led/blue/on')
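
The isac_update callback above forwards updates on a pub socket that is not created in this excerpt. A minimal sketch, assuming isac.tools' zmq behaves like standard pyzmq and using an arbitrary, hypothetical endpoint:

ctx = zmq.Context()
pub = ctx.socket(zmq.PUB)
pub.bind('tcp://*:5555')  # hypothetical endpoint for the non-ISAC side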
Example No. 20
def _two_nodes():
    nA = IsacNode('testA')
    nB = IsacNode('testB')
    yield nA, nB
    nA.shutdown()
    nB.shutdown()
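
Examples No. 18 and No. 20 look like the bodies of pytest fixtures with their decorators stripped. A minimal sketch of how such a fixture might be exposed, assuming a pytest version whose fixtures support yield (the decoration and fixture name are assumptions, not shown in the snippets):

import pytest
from isac import IsacNode

@pytest.fixture
def one_node():
    # Give the test a fresh node, then shut it down once the test finishes.
    n = IsacNode('test')
    yield n
    n.shutdown()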