Example #1
    def test___repr__(self):
        """Testing function __repr__."""
        # Setup DataPointMetadata
        variable = DataPointMetadata(5, 6)

        # Test
        expected = ('''<DataPointMetadata key='5', value='6'>''')
        result = variable.__repr__()
        self.assertEqual(result, expected)
Example #2
    def test_datapoints_to_dicts(self):
        """Testing method or function named datapoints_to_dicts."""
        # Initialize key variables
        datapoints = []

        # Create DataPoints
        for value in range(0, 2):
            # Sleep to force a change in the timestamp
            sleep(0.1)

            metadata = []
            for meta in range(0, 2):
                metadata.append(DataPointMetadata(int(meta), str(meta * 2)))

            # Create the datapoint
            datapoint = DataPoint('label_{}'.format(value),
                                  value,
                                  data_type=DATA_INT)
            # Add metadata
            for meta in metadata:
                datapoint.add(meta)

            # Add reserved-key metadata that the DataPoint should ignore.
            for key in DATAPOINT_KEYS:
                datapoint.add(DataPointMetadata(key, '_{}_'.format(key)))

            # Add the datapoint to the list
            datapoints.append(datapoint)

        # Start testing
        result = converter.datapoints_to_dicts(datapoints)
        expected = {
            'key_value_pairs': {
                0: ('0', '0'),
                1: ('1', '2'),
                2: ('pattoo_key', 'label_0'),
                3: ('pattoo_data_type', 99),
                4: ('pattoo_value', 0),
                5: ('pattoo_timestamp', 1575794447250),
                6: ('pattoo_checksum',
                    '284d21bff49bbde9eb7fc3ad98a88e5cbf72830f69743b47bd2c349'
                    '407807f68'),
                7: ('pattoo_key', 'label_1'),
                8: ('pattoo_value', 1),
                9: ('pattoo_timestamp', 1575915772433),
                10: ('pattoo_checksum',
                     'a5919eb5fc5bac62e7c80bc04155931f75e22166ed84b1d07f704f4'
                     '0b083d098')
            },
            'datapoint_pairs': [[0, 1, 2, 3, 4, 5, 6], [0, 1, 7, 3, 8, 9, 10]]
        }

        self.assertEqual(result['datapoint_pairs'],
                         expected['datapoint_pairs'])
        for key, value in result['key_value_pairs'].items():
            if key not in [5, 9]:
                self.assertEqual(expected['key_value_pairs'][key], value)
Example #3
    def stats_disk_partitions(self):
        """Update agent with disk partition data.

        Args:
            None

        Returns:
            result: List of DataPoint objects

        """
        # Initialize key variables
        result = []

        # Get filesystem partition utilization
        items = psutil.disk_partitions()
        # "items" is a list of named tuples describing partitions
        for item in items:
            # "source" is the partition mount point
            mountpoint = item.mountpoint
            if "docker" not in str(mountpoint):
                # Add more metadata
                meta = []
                meta.append(
                    DataPointMetadata('{}_device'.format('disk_partition'),
                                      item.device))
                meta.append(
                    DataPointMetadata('{}_mountpoint'.format('disk_partition'),
                                      item.mountpoint))
                meta.append(
                    DataPointMetadata('{}_fstype'.format('disk_partition'),
                                      item.fstype))
                meta.append(
                    DataPointMetadata('{}_opts'.format('disk_partition'),
                                      item.opts))

                # Get the partition data. Skip if the data is unreadable
                # due to permissions (eg. External storage mounted by users)
                try:
                    partition = psutil.disk_usage(mountpoint)._asdict()
                except Exception:
                    continue

                for key, value in partition.items():
                    _dv = DataPoint(
                        '{}_disk_usage_{}'.format('disk_partition', key),
                        value,
                        data_type=DATA_INT)
                    _dv.add(meta)
                    _dv.add(self.metadata)
                    result.append(_dv)

        # Return the results
        return result
Example #4
    def _get_target_datapoints(self, item):
        """Poll each spoke in parallel.

        Args:
            item: TargetPollingPoints object

        Returns:
            ddv: TargetDataPoints for the SNMPVariable target

        """
        # Initialize data gathering
        ip_target = item.target
        ddv = TargetDataPoints(ip_target)

        # BAC0 only works with IP addresses
        ip_address = network.get_ipaddress(ip_target)
        if bool(ip_address) is False:
            return ddv

        # Get list of type DataPoint
        datapoints = []
        for polltarget in item.data:
            # Get polling results
            value = poll_target_address(ip_address, polltarget.address,
                                        'presentValue', self._bacnet)
            name = poll_target_address(ip_address, polltarget.address,
                                       'objectName', self._bacnet)

            # Skip if invalid data is received
            if value is None:
                continue

            # Do multiplication
            if data.is_numeric(value) is True:
                value = float(value) * polltarget.multiplier
                data_type = DATA_FLOAT
            else:
                data_type = DATA_STRING

            # Update datapoints
            datapoint = DataPoint(
                'analog_value_point_{}'.format(polltarget.address),
                value,
                data_type=data_type)
            datapoint.add(DataPointMetadata('target', ip_target))
            if name is not None:
                datapoint.add(DataPointMetadata('object_name', name))
            datapoints.append(datapoint)

        # Return
        ddv.add(datapoints)
        return ddv
Example #5
    def stats_network(self):
        """Update agent with network data.

        Args:
            None

        Returns:
            result: List of DataPoint objects

        """
        # Initialize key variables
        result = []

        # Get network utilization
        nicddv = psutil.net_io_counters(pernic=True)
        for nic, nic_named_tuple in nicddv.items():
            nic_dict = nic_named_tuple._asdict()
            for key, value in nic_dict.items():
                _dv = DataPoint('{}_{}'.format('network_io', key),
                                value,
                                data_type=DATA_COUNT64)
                _dv.add(self.metadata)
                _dv.add(
                    DataPointMetadata('{}_interface'.format('network_io'),
                                      nic))
                result.append(_dv)

        # Return the results
        return result
Example #6
def test_agent():
    """Create and return a sample AgentPolledData object."""
    # Define the polling interval in seconds (integer).
    polling_interval = 300

    # Give the agent a name
    agent_name = 'sample_agent_script'

    # Let's assume the script has already received this data from SITE_A
    site_a_data = [['ABC', 123.456], ['DEF', 456.789]]

    # Let's assume the script has already received this data from WORK_1
    work_1_data = [['GHI', 654.321], ['JKL', 987.654]]

    # Setup the agent's AgentPolledData object.
    agent = AgentPolledData(agent_name, polling_interval)

    # Let's add some metadata that you don't want to affect charting in the
    # event of a change. Department names change all the time.
    metadata_static = DataPointMetadata('Department Name',
                                        'The Palisadoes Foundation',
                                        update_checksum=False)

    # Let's add some metadata that will change and trigger a new chart.
    metadata_dynamic = DataPointMetadata('Financial Year', '2020')

    # Create target objects for SITE_A
    target = TargetDataPoints('SITE_A')
    for quote in site_a_data:
        key, value = quote
        datapoint = DataPoint(key, value, data_type=DATA_FLOAT)
        datapoint.add(metadata_static)
        datapoint.add(metadata_dynamic)
        target.add(datapoint)
    agent.add(target)

    # Create target objects for WORK_1
    target = TargetDataPoints('WORK_1')
    for quote in work_1_data:
        key, value = quote
        datapoint = DataPoint(key, value, data_type=DATA_FLOAT)
        datapoint.add(metadata_static)
        datapoint.add(metadata_dynamic)
        target.add(datapoint)
    agent.add(target)

    # Return agent
    return agent
Example #7
    def test___init__(self):
        """Testing function __init__."""
        # Setup DataPoint - Valid
        for key, value in [(1, 2), ('1', 2), (1, '2'), ('1', '2'), (1.1, 2.1),
                           ('1.1', 2.1), (1.1, '2.1'), ('1.1', '2.1')]:
            result = DataPointMetadata(key, value)
            self.assertEqual(result.key, str(key))
            self.assertEqual(result.value, str(value))
            self.assertTrue(result.valid)

        # Setup DataPoint - Invalid
        for key, value in [('pattoo', 1), ('123pattoo123', 1), (None, 2),
                           ('1', None), (True, '2'), ('1', True), ({}, 2.1),
                           ('1.1', {
                               2: 1
                           }), (False, '2.1'), ('1.1', False)]:
            result = DataPointMetadata(key, value)
            self.assertFalse(result.valid)
Example #8
def _walker(snmpvariable, polltargets):
    """Poll each spoke in parallel.

    Args:
        snmpvariable: SNMPVariable to poll
        polltargets: List of PollingPoint objects to poll

    Returns:
        ddv: TargetDataPoints for the SNMPVariable target

    """
    # Initialize data gathering
    ddv = TargetDataPoints(snmpvariable.ip_target)

    # Get list of type DataPoint
    datapoints = []
    for polltarget in polltargets:
        # Get OID polling results
        query = snmp.SNMP(snmpvariable)
        query_datapoints = query.walk(polltarget.address)

        # Apply multiplier to the results
        for _dp in query_datapoints:
            # Do multiplication
            if data.is_data_type_numeric(_dp.data_type) is True:
                value = float(_dp.value) * polltarget.multiplier
            else:
                value = _dp.value

            # Update datapoints
            datapoint = DataPoint(polltarget.address,
                                  value,
                                  data_type=_dp.data_type)
            datapoint.add(DataPointMetadata('oid', _dp.key))
            datapoints.append(datapoint)

    # Return
    ddv.add(datapoints)
    return ddv
Example #9
    def stats_disk_io(self):
        """Update agent with disk io data.

        Args:
            ddv: TargetDataPoints object

        Returns:
            None

        """
        # Initialize key variables
        regex = re.compile(r'^ram\d+$')
        result = []

        # Get per-disk I/O counters
        ioddv = psutil.disk_io_counters(perdisk=True)

        # "source" is disk name
        for disk, disk_named_tuple in ioddv.items():
            # No RAM pseudo disks. RAM disks OK.
            if bool(regex.match(disk)) is True:
                continue
            # No loopbacks
            if disk.startswith('loop') is True:
                continue

            # Populate data
            disk_dict = disk_named_tuple._asdict()
            for key, value in disk_dict.items():
                new_key = '{}_{}'.format('disk_io', key)
                _dv = DataPoint(new_key, value, data_type=DATA_COUNT64)
                _dv.add(self.metadata)
                _dv.add(DataPointMetadata('disk_partition', disk))
                result.append(_dv)

        # Return the results
        return result
Example #10
    def __init__(self):
        """Initialize the class.

        Args:
            None

        Returns:
            None

        """
        #######################################################################
        # Set non timeseries values
        #######################################################################
        self.metadata = []

        # OS release (kernel)
        self.metadata.append(
            DataPointMetadata('release',
                              platform.release(),
                              update_checksum=False))

        # OS version
        self.metadata.append(
            DataPointMetadata('version',
                              platform.version(),
                              update_checksum=False))

        # Processor type
        self.metadata.append(
            DataPointMetadata('processor', platform.processor()))

        # Operating system type (Linux / Windows)
        self.metadata.append(DataPointMetadata('type', platform.system()))

        # CPU count
        self.metadata.append(DataPointMetadata('cpus', psutil.cpu_count()))

        # System name
        self.metadata.append(DataPointMetadata('hostname', socket.getfqdn()))
Example #11
def main():
    """Post data to pattoo server.

    Args:
        None

    Returns:
        None

    """
    '''
    NOTE:

    Scripts must be run at regular intervals and the polling_interval
    should be automatically provided to the main() function.

    Notes about CRON:
    When using cron, change this value to match the cron interval in seconds.
    It is not advised to use cron for polling unless you know the interval
    will not change.

    Ideally your agents should run as daemons, not as cron jobs. See the
    daemon example script, which explains how to do this.
    '''

    # Define the polling interval in seconds (integer).
    polling_interval = 300

    # Give the agent a name
    agent_name = 'sample_agent_script'

    # Let's assume the script has already received this data from SITE_A
    site_a_data = [['ABC', 123.456], ['DEF', 456.789]]

    # Let's assume the script has already received this data from WORK_1
    work_1_data = [['GHI', 654.321], ['JKL', 987.654]]
    '''
    NOTE:

    The AgentPolledData object contains unique identification information
    that the pattoo server will use to differentiate the information source.
    This includes the hostname of the system on which the object was created
    and a unique hashed identifier stored in the cache directory of the agent
    configuration.

    You just need to write an agent that does one thing well, such as
    collecting data from one type of target source. For example, you don't
    need to give each agent that does the same thing a different "agent_name".
    Just make sure that different types of agents have different "agent_name"
    values.

    The PattooShared library will take care of the rest.
    '''

    # Setup the agent's AgentPolledData object.
    agent = AgentPolledData(agent_name, polling_interval)
    '''
    NOTE:

    Metadata is normally expected to stay constant. If it changes then the
    datapoint ID changes and you'll start plotting a brand new chart on the
    pattoo server. The old chart will stop at the time the metadata changed.

    In some cases, you may not want changing metadata to cause a brand new
    plot. For example, your metadata for computer resource charting may include
    the operating system version. This is useful background information, but
    shouldn't impact charting if it changes. This type of metadata should be
    dynamic.

    '''
    # Let's add some metadata that you don't want to affect charting in the
    # event of a change. Department names change all the time.
    metadata_static = DataPointMetadata('Department Name',
                                        'The Palisadoes Foundation',
                                        update_checksum=False)

    # Let's add some metadata that will change and trigger a new chart.
    metadata_dynamic = DataPointMetadata('Financial Year', '2020')

    # Create target objects for SITE_A
    target = TargetDataPoints('SITE_A')
    for quote in site_a_data:
        key, value = quote
        '''
        NOTE:

        You don't have to specify the time when the data was collected.
        The DataPoint object captures that information automatically. You can
        also specify it using the timestamp= argument. See the class
        documentation for details.

        The default data_type is DATA_INT (integer). Read the documentation
        for the other data types, which cover float and counter values.
        '''
        datapoint = DataPoint(key, value, data_type=DATA_FLOAT)
        datapoint.add(metadata_static)
        datapoint.add(metadata_dynamic)
        target.add(datapoint)
    agent.add(target)

    # Create target objects for WORK_1
    target = TargetDataPoints('WORK_1')
    for quote in work_1_data:
        key, value = quote
        datapoint = DataPoint(key, value, data_type=DATA_FLOAT)
        datapoint.add(metadata_static)
        datapoint.add(metadata_dynamic)
        target.add(datapoint)
    agent.add(target)

    # Post the data to pattoo
    post = PostAgent(agent)
    post.post()
Example #12
    def test_add(self):
        """Testing function add."""
        # Setup DataPoint - Valid
        value = 1093454
        _key_ = 'testing'
        data_type = DATA_INT
        variable = DataPoint(_key_, value, data_type=data_type)

        # Test adding
        for key, value in [(1, 2), (3, 4), (5, 6)]:
            metadata = DataPointMetadata(key, value)
            variable.add(metadata)
            self.assertEqual(variable.metadata[str(key)], str(value))

        self.assertEqual(len(variable.metadata), 3)
        self.assertEqual(len(variable.checksum), 64)
        self.assertEqual(
            variable.checksum, '''\
73ce7225ca1ea55f53c96991c9922a185cf695224b94f2051b8a853049ba1935''')

        # Test adding duplicates (no change)
        for key, value in [(1, 2), (3, 4), (5, 6)]:
            metadata = DataPointMetadata(key, value)
            variable.add(metadata)
            self.assertEqual(variable.metadata[str(key)], str(value))

        self.assertEqual(len(variable.metadata), 3)
        self.assertEqual(len(variable.checksum), 64)
        self.assertEqual(
            variable.checksum, '''\
73ce7225ca1ea55f53c96991c9922a185cf695224b94f2051b8a853049ba1935''')

        # Test adding with update_checksum set to False. The checksum is unchanged.
        for key, value in [(10, 20), (30, 40), (50, 60)]:
            metadata = DataPointMetadata(key, value, update_checksum=False)
            variable.add(metadata)
            self.assertEqual(variable.metadata[str(key)], str(value))

        self.assertEqual(len(variable.metadata), 6)
        self.assertEqual(len(variable.checksum), 64)
        self.assertEqual(
            variable.checksum, '''\
73ce7225ca1ea55f53c96991c9922a185cf695224b94f2051b8a853049ba1935''')

        # Test adding with update_checksum set to True. No change,
        # as these keys have already been added.
        for key, value in [(10, 20), (30, 40), (50, 60)]:
            metadata = DataPointMetadata(key, value, update_checksum=True)
            variable.add(metadata)
            self.assertEqual(variable.metadata[str(key)], str(value))

        self.assertEqual(len(variable.metadata), 6)
        self.assertEqual(len(variable.checksum), 64)
        self.assertEqual(
            variable.checksum, '''\
73ce7225ca1ea55f53c96991c9922a185cf695224b94f2051b8a853049ba1935''')

        # Test adding new keys with update_checksum set to True. The checksum changes.
        for key, value in [(11, 21), (31, 41), (51, 61)]:
            metadata = DataPointMetadata(key, value, update_checksum=True)
            variable.add(metadata)
            self.assertEqual(variable.metadata[str(key)], str(value))

        self.assertEqual(len(variable.metadata), 9)
        self.assertEqual(len(variable.checksum), 64)
        self.assertEqual(
            variable.checksum, '''\
2518ce8c9dc0683ef87a6a438c8c79c2ae3fd8ffd38032b6c1d253057d04c8f7''')
Example #13
    def test___init__(self):
        """Testing function __init__."""
        # Setup DataPoint - Valid
        value = 1093454
        _key_ = 'testing'
        _metakey = '_{}'.format(_key_)
        timestamp = int(time.time() * 1000)
        data_type = DATA_INT
        variable = DataPoint(_key_, value, data_type=data_type)
        variable.add(DataPointMetadata(_metakey, _metakey))

        # Test each variable
        self.assertEqual(variable.data_type, data_type)
        self.assertEqual(variable.value, value)
        self.assertEqual(variable.key, _key_)
        self.assertTrue(variable.timestamp >= timestamp)
        self.assertEqual(len(variable.checksum), 64)
        self.assertEqual(
            variable.checksum, '''\
306353a04200e3b889b18c6f78dd8e56a63a287218ec8424e22d31b4b961a905''')
        self.assertEqual(variable.valid, True)

        # Setup DataPoint - Valid
        value = 1093454
        timestamp = 7
        _key_ = 'testing'
        _metakey = '_{}'.format(_key_)
        data_type = DATA_INT
        variable = DataPoint(_key_,
                             value,
                             data_type=data_type,
                             timestamp=timestamp)
        variable.add(DataPointMetadata(_metakey, _metakey))

        # Test each variable
        self.assertEqual(variable.data_type, data_type)
        self.assertEqual(variable.value, value)
        self.assertEqual(variable.key, _key_)
        self.assertEqual(variable.timestamp, timestamp)
        self.assertEqual(len(variable.checksum), 64)
        self.assertEqual(
            variable.checksum, '''\
306353a04200e3b889b18c6f78dd8e56a63a287218ec8424e22d31b4b961a905''')
        self.assertEqual(variable.valid, True)

        # Add metadata that should be ignored.
        for key in DATAPOINT_KEYS:
            variable.add(DataPointMetadata(key, '_{}_'.format(key)))
        variable.add(DataPointMetadata(_metakey, _metakey))

        # Test each variable (unchanged)
        self.assertEqual(variable.data_type, data_type)
        self.assertEqual(variable.value, value)
        self.assertEqual(variable.key, _key_)
        self.assertEqual(len(variable.checksum), 64)
        self.assertEqual(
            variable.checksum, '''\
306353a04200e3b889b18c6f78dd8e56a63a287218ec8424e22d31b4b961a905''')
        self.assertEqual(variable.valid, True)

        # Setup DataPoint - invalid data_type
        value = 1093454
        _key_ = 'testing'
        data_type = 123
        variable = DataPoint(_key_, value, data_type=data_type)

        # Test each variable
        self.assertEqual(variable.data_type, data_type)
        self.assertEqual(variable.value, value)
        self.assertEqual(variable.key, _key_)
        self.assertEqual(variable.valid, False)

        # Setup DataPoint - invalid value for numeric data_type
        value = '_123'
        _key_ = 'testing'
        data_type = DATA_INT
        variable = DataPoint(_key_, value, data_type=data_type)

        # Test each variable
        self.assertEqual(variable.data_type, data_type)
        self.assertEqual(variable.value, value)
        self.assertEqual(variable.key, _key_)
        self.assertEqual(variable.valid, False)

        # Setup DataPoint - valid value for integer data_type but
        # string for value
        value = '1093454'
        _key_ = 'testing'
        data_type = DATA_INT
        variable = DataPoint(_key_, value, data_type=data_type)

        # Test each variable
        self.assertEqual(variable.data_type, data_type)
        self.assertEqual(variable.value, int(value))
        self.assertEqual(variable.key, _key_)
        self.assertEqual(len(variable.checksum), 64)
        self.assertEqual(
            variable.checksum, '''\
7f99301d9be275b14af5626ffabe22a154415ed2ef7dad37f1707bd25b6afdc6''')
        self.assertEqual(variable.valid, True)

        # Setup DataPoint - valid float string value for int data_type
        value = '1093454.3'
        _key_ = 'testing'
        data_type = DATA_INT
        variable = DataPoint(_key_, value, data_type=data_type)

        # Test each variable
        self.assertEqual(variable.data_type, data_type)
        self.assertEqual(variable.value, int(float(value)))
        self.assertEqual(variable.key, _key_)
        self.assertEqual(len(variable.checksum), 64)
        self.assertEqual(
            variable.checksum, '''\
7f99301d9be275b14af5626ffabe22a154415ed2ef7dad37f1707bd25b6afdc6''')
        self.assertEqual(variable.valid, True)

        # Setup DataPoint - valid string value for float data_type
        value = '1093454.3'
        _key_ = 'testing'
        data_type = DATA_FLOAT
        variable = DataPoint(_key_, value, data_type=data_type)

        # Test each variable
        self.assertEqual(variable.data_type, data_type)
        self.assertEqual(variable.value, float(value))
        self.assertEqual(variable.key, _key_)
        self.assertEqual(len(variable.checksum), 64)
        self.assertEqual(
            variable.checksum, '''\
ab48bdc902e2ea5476a54680a7ace0971ab90edb3f6ffe00a89b2d1e17b1548d''')
        self.assertEqual(variable.valid, True)

        # Setup DataPoint - valid value for str data_type
        for value in [0, 1, '1093454.3']:
            _key_ = 'testing'
            data_type = DATA_STRING
            variable = DataPoint(_key_, value, data_type=data_type)

            # Test each variable
            self.assertEqual(variable.data_type, data_type)
            self.assertEqual(variable.value, str(value))
            self.assertEqual(variable.key, _key_)
            self.assertEqual(len(variable.checksum), 64)
            self.assertEqual(
                variable.checksum, '''\
431111472993bf4d9b8b347476b79321fea8a337f3c1cb2fedaa185b54185540''')
            self.assertEqual(variable.valid, True)

        # Setup DataPoint - invalid value for str data_type
        for value in [True, False, None]:
            _key_ = 'testing'
            data_type = DATA_STRING
            variable = DataPoint(_key_, value, data_type=data_type)

            # Test each variable
            self.assertEqual(variable.data_type, data_type)
            self.assertEqual(variable.valid, False)
            self.assertEqual(variable.value, str(value))
            self.assertIsNone(variable.key)
            self.assertEqual(len(variable.checksum), 64)
            self.assertEqual(
                variable.checksum, '''\
a783370f88d8c54b5f5e6641af69d86dae5d4d62621d55cf7e63f6c66644c214''')
Example #14
async def _serial_poller_async(tpp):
    """Poll OPCUA agent data.

    Args:
        tpp: TargetDataPoints object

    Returns:
        target_datapoints: TargetDataPoints object

    """
    # Initialize key variables
    connected = False

    # Test for validity
    if isinstance(tpp, TargetPollingPoints) is False:
        return None
    if isinstance(tpp.target, OPCUAauth) is False:
        return None
    if tpp.valid is False:
        return None

    # Create URL for polling
    ip_target = tpp.target.ip_target
    ip_port = tpp.target.ip_port
    username = tpp.target.username
    password = tpp.target.password
    url = 'opc.tcp://{}:{}'.format(ip_target, ip_port)

    # Initialize data gathering
    target_datapoints = TargetDataPoints(ip_target)

    # Create a client object to connect to OPCUA server
    client = Client(url=url)
    client.set_user(username)
    client.set_password(password)

    # Connect
    try:
        await client.connect()
        connected = True
    except Exception:
        log_message = (
            'Authentication for polling target {} is incorrect'.format(url))
        log.log2warning(51011, log_message)

    if connected is True:
        for point in tpp.data:
            # Make sure we have the right data type
            if isinstance(point, PollingPoint) is False:
                log_message = ('''\
Invalid polling point {} for OPC UA URL {}'''.format(point, url))
                log.log2info(51012, log_message)
                continue

            # Get data
            address = point.address
            try:
                node = client.get_node(address)
                value = await node.read_value()
            except BadNodeIdUnknown:
                log_message = ('''\
OPC UA node {} not found on server {}'''.format(address, url))
                log.log2warning(51015, log_message)
                continue
            except Exception:
                _exception = sys.exc_info()
                log_message = ('OPC UA server communication error')
                log.log2exception(51014, _exception, message=log_message)
                log_message = ('''\
Cannot get value from polling point {} for OPC UA URL {}\
'''.format(address, url))
                log.log2info(51013, log_message)
                continue

            # Create datapoint
            if bool(point.multiplier) is True:
                if is_numeric(value) is True and (is_numeric(point.multiplier)
                                                  is True):
                    value = value * point.multiplier
            else:
                value = 0
            datapoint = DataPoint(address, value)
            datapoint.add(DataPointMetadata('OPCUA Server', ip_target))
            target_datapoints.add(datapoint)

        # Disconnect client
        await client.disconnect()

    return target_datapoints
Example #15
def _serial_poller(drv):
    """Poll each spoke in parallel.

    Args:
        drv: Target to poll

    Returns:
        ddv: TargetDataPoints for the ip_target

    """
    # Initialize data gathering
    ip_target = drv.target
    ddv = TargetDataPoints(ip_target)

    # Get list of type DataPoint
    datapoints = []
    for _rv in drv.data:
        # Ignore invalid data
        if isinstance(_rv, RegisterVariable) is False:
            continue
        if _rv.valid is False:
            continue

        # Poll
        client = ModbusTcpClient(ip_target)
        if isinstance(_rv, InputRegisterVariable):
            try:
                response = client.read_input_registers(_rv.address,
                                                       count=_rv.count,
                                                       unit=_rv.unit)
                key = 'input_register'
            except ConnectionException as _err:
                log_message = ('''\
Cannot connect to target {} to retrieve input register {}, count {}, \
unit {}: {}'''.format(ip_target, _rv.register, _rv.count, _rv.unit, str(_err)))
                log.log2warning(65028, log_message)
                continue
            except Exception:
                log_message = ('''\
Unknown failure with target {} getting input register {}, count {}, \
unit {}'''.format(ip_target, _rv.register, _rv.count, _rv.unit))
                log.log2warning(65030, log_message)
                continue
        elif isinstance(_rv, HoldingRegisterVariable):
            try:
                response = client.read_holding_registers(_rv.address)
                key = 'holding_register'
            except ConnectionException:
                log_message = ('''\
Cannot connect to target {} to retrieve holding register {}, count {}, \
unit {}'''.format(ip_target, _rv.register, _rv.count, _rv.unit))
                log.log2warning(65032, log_message)
                continue
            except Exception:
                log_message = ('''\
Unknown failure with target {} getting holding register {}, count {}, \
unit {}. [{}, {}, {}]\
'''.format(ip_target, _rv.register, _rv.count, _rv.unit,
                sys.exc_info()[0],
                sys.exc_info()[1],
                sys.exc_info()[2]))
                log.log2warning(65031, log_message)
                continue

        # Process data
        if response.isError() is True:
            _log_modbus(ip_target, _rv, response)
        else:
            values = response.registers
            for data_index, _value in enumerate(values):
                # Do multiplication
                value = _value * _rv.multiplier

                # Create DataPoint and append
                new_key = ('{}_{}'.format(key, _rv.register + data_index))
                datapoint = DataPoint(new_key, value, data_type=DATA_INT)
                datapoint.add(DataPointMetadata('unit',
                                                str(_rv.unit).zfill(3)))
                datapoints.append(datapoint)
    ddv.add(datapoints)

    # Return
    return ddv
Example #16
def _create_datapoints(items):
    """Get PATOO_SNMP agent data.

    Update the TargetDataPoints with DataPoints

    Args:
        items: Dict of type SNMPVariable keyed by OID branch

    Returns:
        result: List of DataPoints with metadata added

    Method:
        1) Poll all desired OIDs from the target. Ignore shutdown
            interfaces
        2) Get the ifAlias, ifName, and ifDescr values for each SNMP ifIndex
            to use as metadata for DataPoints
        3) Convert the polled datapoints to use a key of their MIB string
            versus the OID as the key. Use the OID as a metadata value instead.
        4) Add the IfAlias, IfName, and ifDescr values as metadata to
            each datapoint.

    """
    # Initialize key variables
    result = []
    ifindex_lookup = _metadata(items)

    # Process the results
    for key, polled_datapoints in items.items():
        # Ignore keys used to create the ifindex_lookup
        if key in ['ifDescr', 'ifName', 'ifAlias', 'ifIndex', 'ifAdminStatus']:
            continue

        # Evaluate DataPoint list data from remaining keys
        for polled_datapoint in polled_datapoints:
            if polled_datapoint.valid is False:
                continue

            # Reassign DataPoint values
            ifindex = polled_datapoint.key.split('.')[-1]
            if ifindex in ifindex_lookup:

                # Ignore administratively down interfaces
                if bool(ifindex_lookup[ifindex].ifadminstatus) is False:
                    continue

                # Create a new Datapoint keyed by MIB equivalent
                new_key = _key(polled_datapoint.key)
                datapoint = DataPoint(new_key,
                                      polled_datapoint.value,
                                      data_type=polled_datapoint.data_type)

                # Add metadata to the datapoint
                datapoint.add(DataPointMetadata('oid', polled_datapoint.key))
                if bool(ifindex_lookup[ifindex].ifdescr) is True:
                    datapoint.add(
                        DataPointMetadata('ifDescr',
                                          ifindex_lookup[ifindex].ifdescr))
                if bool(ifindex_lookup[ifindex].ifname) is True:
                    datapoint.add(
                        DataPointMetadata('ifName',
                                          ifindex_lookup[ifindex].ifname))

                # Add metadata to the datapoint (Don't update checksum as this
                # value may change over time via configuration)
                if bool(ifindex_lookup[ifindex].ifalias) is True:
                    datapoint.add(
                        DataPointMetadata('ifAlias',
                                          ifindex_lookup[ifindex].ifalias,
                                          update_checksum=False))

                result.append(datapoint)

    return result