Example 1
    def __init__(self, snmpvariable):

        #####################################################################
        # Create a dict of simulated ifMIB walk values keyed by OID branch
        #####################################################################

        # Add simulated ifMIB walk values for interface descriptions
        self.data = {}

        # ifDescr
        self.data['.1.3.6.1.2.1.2.2.1.2'] = [
            DataPoint('.1.3.6.1.2.1.2.2.1.2.1', 'lo', data_type=2),
            DataPoint('.1.3.6.1.2.1.2.2.1.2.2', 'eth0', data_type=2),
            DataPoint('.1.3.6.1.2.1.2.2.1.2.3', 'wlan0', data_type=2)
        ]

        # sysObjectID
        self.data['.1.3.6.1.2.1.1.2.0'] = [
            DataPoint('.1.3.6.1.2.1.1.2.0', '.1.3.6.1.1234', data_type=2)
        ]

        # ifInOctets
        self.data['.1.3.6.1.2.1.2.2.1.10'] = [
            DataPoint('.1.3.6.1.2.1.2.2.1.10.1', 83554391, data_type=32),
            DataPoint('.1.3.6.1.2.1.2.2.1.10.2', 1099211361, data_type=32),
            DataPoint('.1.3.6.1.2.1.2.2.1.10.3', 0, data_type=32)
        ]
        # ifOutOctets
        self.data['.1.3.6.1.2.1.2.2.1.16'] = [
            DataPoint('.1.3.6.1.2.1.2.2.1.16.1', 83596845, data_type=32),
            DataPoint('.1.3.6.1.2.1.2.2.1.16.2', 2788372879, data_type=32),
            DataPoint('.1.3.6.1.2.1.2.2.1.16.3', 0, data_type=32)
        ]
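
A minimal, self-contained sketch of how simulated walk data like the dict above might be read back; the import path is an assumption based on the other examples in this collection.

# Sketch only: rebuild one simulated branch and read it back the way the
# tests read the self.data dict created above (import path assumed).
from pattoo_shared.variables import DataPoint

data = {'.1.3.6.1.2.1.2.2.1.2': [
    DataPoint('.1.3.6.1.2.1.2.2.1.2.2', 'eth0', data_type=2)]}
for datapoint in data['.1.3.6.1.2.1.2.2.1.2']:
    print(datapoint.key, datapoint.value)   # -> .1.3.6.1.2.1.2.2.1.2.2 eth0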
Example 2
def _make_agent_data():
    """Create generate data to post to API server"""
    # Initialize key variables
    config = Config()
    polling_interval = 60
    pattoo_agent_program = 1
    pattoo_agent_polled_target = 2
    pattoo_key = '3'
    pattoo_value = 4

    # We want to make sure we get a different AgentID each time
    filename = files.agent_id_file(pattoo_agent_program, config)
    if os.path.isfile(filename) is True:
        os.remove(filename)

    # Setup AgentPolledData
    apd = AgentPolledData(pattoo_agent_program, polling_interval)

    # Initialize TargetDataPoints
    ddv = TargetDataPoints(pattoo_agent_polled_target)

    # Setup DataPoint
    data_type = DATA_INT
    variable = DataPoint(pattoo_key, pattoo_value, data_type=data_type)

    # Add data to TargetDataPoints
    ddv.add(variable)

    # Create a result
    apd.add(ddv)

    # Return agent data
    return apd
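
A hedged sketch of how the returned AgentPolledData might be posted, following the pattern shown in Example 25; the PostAgent import path is an assumption.

# Sketch only: post the generated agent data (import path assumed).
from pattoo_shared.phttp import PostAgent

apd = _make_agent_data()
PostAgent(apd).post()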
Example 3
    def stats_network(self):
        """Update agent with network data.

        Args:
            ddv: TargetDataPoints object

        Returns:
            None

        """
        # Initialize key variables
        result = []

        # Get network utilization
        nicddv = psutil.net_io_counters(pernic=True)
        for nic, nic_named_tuple in nicddv.items():
            nic_dict = nic_named_tuple._asdict()
            for key, value in nic_dict.items():
                _dv = DataPoint('{}_{}'.format('network_io', key),
                                value,
                                data_type=DATA_COUNT64)
                _dv.add(self.metadata)
                _dv.add(
                    DataPointMetadata('{}_interface'.format('network_io'),
                                      nic))
                result.append(_dv)

        # Return the result
        return result
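
For context, a hedged sketch of how a DataPoint list from a collector method like this one is typically attached to a target, following the TargetDataPoints pattern used throughout these examples; `collector` is a hypothetical instance of the class the method belongs to, and the import path is an assumption.

# Sketch only: wrap the collected datapoints in a TargetDataPoints object.
# `collector` is a hypothetical instance of the class defined above.
from pattoo_shared.variables import TargetDataPoints

target = TargetDataPoints('localhost')
target.add(collector.stats_network())   # add() also accepts a list of DataPoint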
Example 4
    def stats_disk_swap(self):
        """Update agent with disk swap data.

        Args:
            ddv: TargetDataPoints object

        Returns:
            None

        """
        # Initialize key variables
        result = []

        # Get swap information
        system_list = psutil.swap_memory()._asdict()
        for key, value in system_list.items():
            # Different suffixes have different data types
            if key in ['sin', 'sout']:
                data_type = DATA_COUNT64
            else:
                data_type = DATA_INT

            # No need to specify a suffix as there is only one swap
            new_key = '{}_{}'.format('swap_memory', key)
            _dv = DataPoint(new_key, value, data_type=data_type)
            _dv.add(self.metadata)
            result.append(_dv)

        # Return the result
        return result
Example 5
def _named_tuple_to_dv(values,
                       parameter_label,
                       data_type=DATA_INT,
                       metadata=None):
    """Convert a named tuple to a list of DataPoint objects.

    Args:
        values: Named tuple
        parameter_label: Prefix for each DataPoint key
        data_type: Data type
        metadata: Metadata to add to each DataPoint

    Returns:
        result: List of DataPoint objects

    """
    # Get data
    data_dict = values._asdict()
    result = []

    # Cycle through results
    for key, value in data_dict.items():
        _dv = DataPoint('{}_{}'.format(parameter_label, key),
                        value,
                        data_type=data_type)
        _dv.add(metadata)
        result.append(_dv)

    # Return
    return result
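
A hedged usage sketch of the helper above, mirroring how Example 23 calls it for memory statistics; the import paths are assumptions.

# Sketch only: convert psutil's virtual_memory() named tuple to DataPoints.
import psutil
from pattoo_shared.constants import DATA_INT
from pattoo_shared.variables import DataPointMetadata

datapoints = _named_tuple_to_dv(
    psutil.virtual_memory(), 'memory', data_type=DATA_INT,
    metadata=DataPointMetadata('agent_program', 'example_agent'))
for _dp in datapoints:
    print(_dp.key, _dp.value)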
Example 6
def test_agent():
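    """Create an AgentPolledData object populated with sample data."""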
    # Define the polling interval in seconds (integer).
    polling_interval = 300

    # Give the agent a name
    agent_name = 'sample_agent_script'

    # Let's assume the script has already received this data from SITE_A
    site_a_data = [['ABC', 123.456], ['DEF', 456.789]]

    # Let's assume the script has already received this data from WORK_1
    work_1_data = [['GHI', 654.321], ['JKL', 987.654]]

    # Setup the agent's AgentPolledData object.
    agent = AgentPolledData(agent_name, polling_interval)

    # Let's add some metadata that you don't want to affect charting in the
    # event of a change. Department names change all the time.
    metadata_static = DataPointMetadata('Department Name',
                                        'The Palisadoes Foundation',
                                        update_checksum=False)

    # Let's add some metadata that will change and trigger a new chart.
    metadata_dynamic = DataPointMetadata('Financial Year', '2020')

    # Create target objects for SITE_A
    target = TargetDataPoints('SITE_A')
    for quote in site_a_data:
        key, value = quote
        datapoint = DataPoint(key, value, data_type=DATA_FLOAT)
        datapoint.add(metadata_static)
        datapoint.add(metadata_dynamic)
        target.add(datapoint)
    agent.add(target)

    # Create target objects for WORK_1
    target = TargetDataPoints('WORK_1')
    for quote in work_1_data:
        key, value = quote
        datapoint = DataPoint(key, value, data_type=DATA_FLOAT)
        datapoint.add(metadata_static)
        datapoint.add(metadata_dynamic)
        target.add(datapoint)
    agent.add(target)

    # Return agent
    return agent
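
A hedged sketch of inspecting what the helper above produces, using the converter function exercised in Example 14; the import path is an assumption.

# Sketch only: flatten the AgentPolledData into a list of DataPoint objects.
from pattoo_shared import converter

agent = test_agent()
for datapoint in converter.agentdata_to_datapoints(agent):
    print(datapoint.key, datapoint.value, datapoint.metadata)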
Example 7
    def test_datapoints_to_dicts(self):
        """Testing method or function named datapoints_to_dicts."""
        # Initialize key variables
        datapoints = []

        # Create DataPoints
        for value in range(0, 2):
            # Sleep to force a change in the timestamp
            sleep(0.1)

            metadata = []
            for meta in range(0, 2):
                metadata.append(DataPointMetadata(int(meta), str(meta * 2)))

            # Create the datapoint
            datapoint = DataPoint('label_{}'.format(value),
                                  value,
                                  data_type=DATA_INT)
            # Add metadata
            for meta in metadata:
                datapoint.add(meta)

            # Add metadata that should be ignored by the DataPoint.
            for key in DATAPOINT_KEYS:
                datapoint.add(DataPointMetadata(key, '_{}_'.format(key)))

            # Add the datapoint to the list
            datapoints.append(datapoint)

        # Start testing
        result = converter.datapoints_to_dicts(datapoints)
        expected = {
            'key_value_pairs': {
                0: ('0', '0'),
                1: ('1', '2'),
                2: ('pattoo_key', 'label_0'),
                3: ('pattoo_data_type', 99),
                4: ('pattoo_value', 0),
                5: ('pattoo_timestamp', 1575794447250),
                6: ('pattoo_checksum',
                    '284d21bff49bbde9eb7fc3ad98a88e5cbf72830f69743b47bd2c349'
                    '407807f68'),
                7: ('pattoo_key', 'label_1'),
                8: ('pattoo_value', 1),
                9: ('pattoo_timestamp', 1575915772433),
                10: ('pattoo_checksum',
                     'a5919eb5fc5bac62e7c80bc04155931f75e22166ed84b1d07f704f4'
                     '0b083d098')
            },
            'datapoint_pairs': [[0, 1, 2, 3, 4, 5, 6], [0, 1, 7, 3, 8, 9, 10]]
        }

        self.assertEqual(result['datapoint_pairs'],
                         expected['datapoint_pairs'])
        for key, value in result['key_value_pairs'].items():
            if key not in [5, 9]:
                self.assertEqual(expected['key_value_pairs'][key], value)
Example 8
    def test_agentdata_to_post(self):
        """Testing method or function named agentdata_to_post."""
        # Setup AgentPolledData
        agent_program = 'panda_bear'
        polling_interval = 20
        apd = AgentPolledData(agent_program, polling_interval)

        # Initialize TargetDataPoints
        target = 'teddy_bear'
        ddv = TargetDataPoints(target)

        # Setup DataPoint
        value = 457
        key = 'gummy_bear'
        data_type = DATA_INT
        variable = DataPoint(key, value, data_type=data_type)

        # Add data to TargetDataPoints
        ddv.add(variable)

        # Add TargetDataPoints to AgentPolledData and test conversion
        apd.add(ddv)
        result = converter.agentdata_to_post(apd)
        self.assertEqual(result.pattoo_agent_id, apd.agent_id)
        self.assertEqual(
            result.pattoo_agent_polling_interval, polling_interval * 1000)
        self.assertTrue(isinstance(result.pattoo_datapoints, dict))

        # Test the key value pairs
        item = result.pattoo_datapoints['key_value_pairs']
        self.assertTrue('datapoint_pairs' in result.pattoo_datapoints)
        self.assertTrue('key_value_pairs' in result.pattoo_datapoints)
        self.assertTrue(isinstance(item, dict))

        # Convert item to a list of tuples for ease of testing
        tester = [(k, v) for k, v in sorted(item.items())]
        self.assertEqual(
            tester[0],
            (0, ('pattoo_agent_polling_interval', '20000')))

        self.assertEqual(
            tester[3:8],
            [
                (3, ('pattoo_agent_polled_target', 'teddy_bear')),
                (4, ('pattoo_agent_program', 'panda_bear')),
                (5, ('pattoo_key', 'gummy_bear')),
                (6, ('pattoo_data_type', 99)),
                (7, ('pattoo_value', 457))]
        )

        # Test the pointers to the key value pairs
        item = result.pattoo_datapoints['datapoint_pairs']
        self.assertTrue(isinstance(item, list))
        self.assertEqual(len(item), 1)
        self.assertEqual(len(item[0]), 10)
Example 9
    def stats_disk_partitions(self):
        """Update agent with disk partition data.

        Args:
            ddv: TargetDataPoints object

        Returns:
            None

        """
        # Initialize key variables
        result = []

        # Get filesystem partition utilization
        items = psutil.disk_partitions()
        # "items" is a list of named tuples describing partitions
        for item in items:
            # "source" is the partition mount point
            mountpoint = item.mountpoint
            if "docker" not in str(mountpoint):
                # Add more metadata
                meta = []
                meta.append(
                    DataPointMetadata('{}_device'.format('disk_partition'),
                                      item.device))
                meta.append(
                    DataPointMetadata('{}_mountpoint'.format('disk_partition'),
                                      item.mountpoint))
                meta.append(
                    DataPointMetadata('{}_fstype'.format('disk_partition'),
                                      item.fstype))
                meta.append(
                    DataPointMetadata('{}_opts'.format('disk_partition'),
                                      item.opts))

                # Get the partition data. Skip if the data is unreadable
                # due to permissions (eg. External storage mounted by users)
                try:
                    partition = psutil.disk_usage(mountpoint)._asdict()
                except:
                    continue

                for key, value in partition.items():
                    _dv = DataPoint('{}_disk_usage_{}'.format(
                        'disk_partition', key),
                                    value,
                                    data_type=DATA_INT)
                    _dv.add(meta)
                    _dv.add(self.metadata)
                    result.append(_dv)

        # Return the result
        return result
Example 10
    def _get_target_datapoints(self, item):
        """Poll each spoke in parallel.

        Args:
            item: TargetPollingPoints object

        Returns:
            ddv: TargetDataPoints for the SNMPVariable target

        """
        # Initialize data gathering
        ip_target = item.target
        ddv = TargetDataPoints(ip_target)

        # BAC0 only works with IP addresses
        ip_address = network.get_ipaddress(ip_target)
        if bool(ip_address) is False:
            return ddv

        # Get list of type DataPoint
        datapoints = []
        for polltarget in item.data:
            # Get polling results
            value = poll_target_address(ip_address, polltarget.address,
                                        'presentValue', self._bacnet)
            name = poll_target_address(ip_address, polltarget.address,
                                       'objectName', self._bacnet)

            # Skip if invalid data is received
            if value is None:
                continue

            # Do multiplication
            if data.is_numeric(value) is True:
                value = float(value) * polltarget.multiplier
                data_type = DATA_FLOAT
            else:
                data_type = DATA_STRING

            # Update datapoints
            datapoint = DataPoint('analog_value_point_{}'.format(
                polltarget.address),
                                  value,
                                  data_type=data_type)
            datapoint.add(DataPointMetadata('target', ip_target))
            if name is not None:
                datapoint.add(DataPointMetadata('object_name', name))
            datapoints.append(datapoint)

        # Return
        ddv.add(datapoints)
        return ddv
Example 11
    def test_add(self):
        """Testing function append."""
        # Setup AgentPolledData
        agent_program = 'panda_bear'
        polling_interval = 20
        apd = AgentPolledData(agent_program, polling_interval)

        # Initialize TargetDataPoints
        target = 'teddy_bear'
        ddv = TargetDataPoints(target)
        self.assertEqual(ddv.target, target)
        self.assertFalse(ddv.valid)
        self.assertEqual(ddv.data, [])

        # Setup DataPoint
        value = 457
        _key_ = 'gummy_bear'
        data_type = DATA_INT
        variable = DataPoint(_key_, value, data_type=data_type)

        # Add data to TargetDataPoints
        self.assertFalse(ddv.valid)
        ddv.add(variable)
        self.assertTrue(ddv.valid)

        # Test add
        self.assertFalse(apd.valid)
        apd.add(None)
        self.assertFalse(apd.valid)
        apd.add(variable)
        self.assertFalse(apd.valid)
        apd.add(ddv)
        self.assertTrue(apd.valid)

        # Test contents
        data = apd.data
        self.assertTrue(isinstance(data, list))
        self.assertEqual(len(data), 1)

        _ddv = data[0]
        self.assertTrue(isinstance(_ddv, TargetDataPoints))
        self.assertEqual(_ddv.target, target)
        self.assertTrue(_ddv.valid)
        self.assertTrue(isinstance(_ddv.data, list))
        self.assertEqual(len(_ddv.data), 1)

        data = _ddv.data
        _variable = _ddv.data[0]
        self.assertEqual(_variable.data_type, data_type)
        self.assertEqual(_variable.value, value)
        self.assertEqual(_variable.key, _key_)
Example 12
def create_cache():
    """Testing method / function records."""
    # Initialize key variables
    config = ServerConfig()
    polling_interval = 20
    cache_directory = config.agent_cache_directory(PATTOO_API_AGENT_NAME)
    result = {
        'pattoo_agent_program': data.hashstring(str(random())),
        'pattoo_agent_polled_target': socket.getfqdn(),
        'pattoo_key': data.hashstring(str(random())),
        'pattoo_value': round(uniform(1, 100), 5),
        'pattoo_agent_hostname': socket.getfqdn()
    }

    # We want to make sure we get a different AgentID each time
    filename = files.agent_id_file(
        result['pattoo_agent_program'],
        config)
    if os.path.isfile(filename) is True:
        os.remove(filename)
    result['pattoo_agent_id'] = files.get_agent_id(
        result['pattoo_agent_program'],
        config)

    # Setup AgentPolledData
    apd = AgentPolledData(result['pattoo_agent_program'], polling_interval)

    # Initialize TargetDataPoints
    ddv = TargetDataPoints(result['pattoo_agent_hostname'])

    # Setup DataPoint
    data_type = DATA_INT
    variable = DataPoint(
        result['pattoo_key'], result['pattoo_value'], data_type=data_type)

    # Add data to TargetDataPoints
    ddv.add(variable)

    # Write data to cache
    apd.add(ddv)
    cache_dict = converter.posting_data_points(
        converter.agentdata_to_post(apd))
    cache_file = '{}{}cache_test.json'.format(cache_directory, os.sep)
    with open(cache_file, 'w') as _fp:
        json.dump(cache_dict, _fp)

    return result
Example 13
    def test___repr__(self):
        """Testing function __repr__."""
        # Need to see all the string output
        self.maxDiff = None

        # Setup DataPoint
        value = 10
        _key_ = 'testing'
        data_type = DATA_INT
        variable = DataPoint(_key_, value, data_type=data_type)

        # Test
        expected = ('''\
<DataPoint key='testing', value=10, data_type=99, \
timestamp={}, valid=True>'''.format(variable.timestamp))
        result = variable.__repr__()
        self.assertEqual(result, expected)
Example 14
    def test_agentdata_to_datapoints(self):
        """Testing method or function named agentdata_to_datapoints."""
        # Setup AgentPolledData
        agent_program = 'panda_bear'
        polling_interval = 20
        apd = AgentPolledData(agent_program, polling_interval)

        # Initialize TargetDataPoints
        target = 'teddy_bear'
        ddv = TargetDataPoints(target)

        # Setup DataPoint
        value = 457
        key = 'gummy_bear'
        data_type = DATA_INT
        variable = DataPoint(key, value, data_type=data_type)

        # Add data to TargetDataPoints
        ddv.add(variable)

        # Add TargetDataPoints to AgentPolledData
        apd.add(ddv)

        # Test contents
        expected_metadata = {
            'pattoo_agent_id': apd.agent_id,
            'pattoo_agent_program': agent_program,
            'pattoo_agent_hostname': apd.agent_hostname,
            'pattoo_agent_polled_target': target,
            'pattoo_agent_polling_interval': apd.agent_polling_interval
        }
        result = converter.agentdata_to_datapoints(apd)

        self.assertEqual(len(result), 1)
        item = result[0]
        self.assertTrue(isinstance(item, DataPoint))
        self.assertEqual(item.value, value)
        self.assertEqual(item.data_type, DATA_INT)
        self.assertEqual(item.key, key)
        self.assertTrue(isinstance(item.metadata, dict))
        self.assertEqual(len(item.metadata), len(expected_metadata))
        for key, value in item.metadata.items():
            self.assertTrue(isinstance(value, str))
            self.assertTrue(isinstance(key, str))
            self.assertEqual(value, str(expected_metadata[key]))
Example 15
    def test_datapoints_to_post(self):
        """Testing method or function named datapoints_to_post."""
        # Initialize key variables
        key = '_key'
        value = '_value'
        datapoints = [DataPoint(key, value)]
        source = '1234'
        polling_interval = 20
        result = converter.datapoints_to_post(
            source, polling_interval, datapoints)

        # Test
        self.assertEqual(
            result.pattoo_agent_polling_interval, polling_interval)
        self.assertEqual(result.pattoo_agent_id, source)
        self.assertEqual(result.pattoo_datapoints, datapoints)
        self.assertEqual(result.pattoo_datapoints[0].key, key)
        self.assertEqual(result.pattoo_datapoints[0].value, value)
Example 16
def _multiply_octets(datapoints):
    """Multiply datapoint value by 8.

    Args:
        datapoints: List of DataPoint objects to multiply

    Returns:
        result: List of DataPoint objects with values multiplied by 8

    """
    # Initialize key variables
    result = []

    # Get interface data
    for datapoint in datapoints:
        new_value = datapoint.value * 8
        result.append(
            DataPoint(datapoint.key, new_value, data_type=datapoint.data_type))
    return result
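
A small usage sketch, assuming the helper above is used to turn octet (byte) counters into bit counters; the import paths are assumptions.

# Sketch only: convert an ifInOctets reading (bytes) into bits.
from pattoo_shared.constants import DATA_COUNT64
from pattoo_shared.variables import DataPoint

octet_points = [
    DataPoint('.1.3.6.1.2.1.2.2.1.10.2', 1099211361, data_type=DATA_COUNT64)]
bit_points = _multiply_octets(octet_points)
print(bit_points[0].value)   # 8793690888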
Example 17
def _walker(snmpvariable, polltargets):
    """Poll each spoke in parallel.

    Args:
        snmpvariable: SNMPVariable to poll
        polltargets: List of PollingPoint objects to poll

    Returns:
        ddv: TargetDataPoints for the SNMPVariable target

    """
    # Initialize data gathering
    ddv = TargetDataPoints(snmpvariable.ip_target)

    # Get list of type DataPoint
    datapoints = []
    for polltarget in polltargets:
        # Get OID polling results
        query = snmp.SNMP(snmpvariable)
        query_datapoints = query.walk(polltarget.address)

        # Apply multiplier to the results
        for _dp in query_datapoints:
            # Do multiplication
            if data.is_data_type_numeric(_dp.data_type) is True:
                value = float(_dp.value) * polltarget.multiplier
            else:
                value = _dp.value

            # Update datapoints
            datapoint = DataPoint(polltarget.address,
                                  value,
                                  data_type=_dp.data_type)
            datapoint.add(DataPointMetadata('oid', _dp.key))
            datapoints.append(datapoint)

    # Return
    ddv.add(datapoints)
    return ddv
Example 18
    def stats_disk_io(self):
        """Update agent with disk io data.

        Args:
            ddv: TargetDataPoints object

        Returns:
            None

        """
        # Initialize key variables
        regex = re.compile(r'^ram\d+$')
        result = []

        # Get disk I/O usage
        ioddv = psutil.disk_io_counters(perdisk=True)

        # "source" is disk name
        for disk, disk_named_tuple in ioddv.items():
            # Skip RAM pseudo disks (ram0, ram1, ...)
            if bool(regex.match(disk)) is True:
                continue
            # No loopbacks
            if disk.startswith('loop') is True:
                continue

            # Populate data
            disk_dict = disk_named_tuple._asdict()
            for key, value in disk_dict.items():
                new_key = '{}_{}'.format('disk_io', key)
                _dv = DataPoint(new_key, value, data_type=DATA_COUNT64)
                _dv.add(self.metadata)
                _dv.add(DataPointMetadata('disk_partition', disk))
                result.append(_dv)

        # Return the result
        return result
Example 19
    def test_add(self):
        """Testing function append."""
        # Initialize TargetDataPoints
        target = 'teddy_bear'
        ddv = TargetDataPoints(target)
        self.assertEqual(ddv.target, target)
        self.assertFalse(ddv.valid)
        self.assertEqual(ddv.data, [])

        # Setup DataPoint
        value = 457
        _key_ = 'gummy_bear'
        data_type = DATA_INT
        variable = DataPoint(_key_, value, data_type=data_type)

        # Test adding invalid value
        ddv.add(None)
        self.assertEqual(ddv.data, [])

        # Test adding variable
        ddv.add(variable)
        self.assertTrue(bool(ddv.data))
        self.assertTrue(isinstance(ddv.data, list))
        self.assertEqual(len(ddv.data), 1)
        checksum = ddv.data[0].checksum

        # Test adding duplicate variable (There should be no changes)
        ddv.add(variable)
        self.assertTrue(bool(ddv.data))
        self.assertTrue(isinstance(ddv.data, list))
        self.assertEqual(len(ddv.data), 1)
        self.assertEqual(checksum, ddv.data[0].checksum)

        # Test the values in the variable
        _variable = ddv.data[0]
        self.assertEqual(_variable.data_type, data_type)
        self.assertEqual(_variable.value, value)
        self.assertEqual(_variable.key, _key_)
Example 20
def _serial_poller(drv):
    """Poll each spoke in parallel.

    Args:
        drv: Target to poll
        input_registers: Input registers to poll
        holding_registers: Holding registers to poll

    Returns:
        ddv: TargetDataPoints for the ip_target

    """
    # Initialize data gathering
    ip_target = drv.target
    ddv = TargetDataPoints(ip_target)

    # Get list of type DataPoint
    datapoints = []
    for _rv in drv.data:
        # Ignore invalid data
        if isinstance(_rv, RegisterVariable) is False:
            continue
        if _rv.valid is False:
            continue

        # Poll
        client = ModbusTcpClient(ip_target)
        if isinstance(_rv, InputRegisterVariable):
            try:
                response = client.read_input_registers(_rv.address,
                                                       count=_rv.count,
                                                       unit=_rv.unit)
                key = 'input_register'
            except ConnectionException as _err:
                log_message = ('''\
Cannot connect to target {} to retrieve input register {}, count {}, \
unit {}: {}'''.format(ip_target, _rv.register, _rv.count, _rv.unit, str(_err)))
                log.log2warning(65028, log_message)
                continue
            except:
                log_message = ('''\
Unknown failure with target {} getting input register {}, count {}, \
unit {}'''.format(ip_target, _rv.register, _rv.count, _rv.unit))
                log.log2warning(65030, log_message)
                continue
        elif isinstance(_rv, HoldingRegisterVariable):
            try:
                response = client.read_holding_registers(_rv.address)
                key = 'holding_register'
            except ConnectionException:
                log_message = ('''\
Cannot connect to target {} to retrieve holding register {}, count {}, \
unit {}'''.format(ip_target, _rv.register, _rv.count, _rv.unit))
                log.log2warning(65032, log_message)
                continue
            except:
                log_message = ('''\
Unknown failure with target {} getting holding register {}, count {}, \
unit {}. [{}, {}, {}]\
'''.format(ip_target, _rv.register, _rv.count, _rv.unit,
                sys.exc_info()[0],
                sys.exc_info()[1],
                sys.exc_info()[2]))
                log.log2warning(65031, log_message)
                continue

        # Process data
        if response.isError() is True:
            _log_modbus(ip_target, _rv, response)
        else:
            values = response.registers
            for data_index, _value in enumerate(values):
                # Do multiplication
                value = _value * _rv.multiplier

                # Create DataPoint and append
                new_key = ('{}_{}'.format(key, _rv.register + data_index))
                datapoint = DataPoint(new_key, value, data_type=DATA_INT)
                datapoint.add(DataPointMetadata('unit',
                                                str(_rv.unit).zfill(3)))
                datapoints.append(datapoint)
    ddv.add(datapoints)

    # Return
    return ddv
Example 21
async def _serial_poller_async(tpp):
    """Poll OPCUA agent data.

    Args:
        tpp: TargetPollingPoints object

    Returns:
        target_datapoints: TargetDataPoints object

    """
    # Initialize key variables
    connected = False

    # Test for validity
    if isinstance(tpp, TargetPollingPoints) is False:
        return None
    if isinstance(tpp.target, OPCUAauth) is False:
        return None
    if tpp.valid is False:
        return None

    # Create URL for polling
    ip_target = tpp.target.ip_target
    ip_port = tpp.target.ip_port
    username = tpp.target.username
    password = tpp.target.password
    url = 'opc.tcp://{}:{}'.format(ip_target, ip_port)

    # Initialize data gathering
    target_datapoints = TargetDataPoints(ip_target)

    # Create a client object to connect to OPCUA server
    client = Client(url=url)
    client.set_user(username)
    client.set_password(password)

    # Connect
    try:
        await client.connect()
        connected = True
    except:
        log_message = ('Cannot connect to polling target {}. '
                       'Authentication may be incorrect'.format(url))
        log.log2warning(51011, log_message)

    if connected is True:
        for point in tpp.data:
            # Make sure we have the right data type
            if isinstance(point, PollingPoint) is False:
                log_message = ('''\
Invalid polling point {} for OPC UA URL {}'''.format(point, url))
                log.log2info(51012, log_message)
                continue

            # Get data
            address = point.address
            try:
                node = client.get_node(address)
                value = await node.read_value()
            except BadNodeIdUnknown:
                log_message = ('''\
OPC UA node {} not found on server {}'''.format(address, url))
                log.log2warning(51015, log_message)
                continue
            except:
                _exception = sys.exc_info()
                log_message = ('OPC UA server communication error')
                log.log2exception(51014, _exception, message=log_message)
                log_message = ('''\
Cannot get value from polling point {} for OPC UA URL {}\
'''.format(address, url))
                log.log2info(51013, log_message)
                continue

            # Create datapoint
            if bool(point.multiplier) is True:
                if is_numeric(value) is True and (is_numeric(point.multiplier)
                                                  is True):
                    value = value * point.multiplier
            else:
                value = 0
            datapoint = DataPoint(address, value)
            datapoint.add(DataPointMetadata('OPCUA Server', ip_target))
            target_datapoints.add(datapoint)

        # Disconnect client
        await client.disconnect()

    return target_datapoints
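
A hedged sketch of driving the coroutine above from synchronous code; the TargetPollingPoints object `tpp` is assumed to be built elsewhere in the agent.

# Sketch only: run the async poller from synchronous code.
import asyncio

target_datapoints = asyncio.run(_serial_poller_async(tpp))   # tpp built elsewhere
if target_datapoints is not None:
    for datapoint in target_datapoints.data:
        print(datapoint.key, datapoint.value)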
Example 22
    def test___init__(self):
        """Testing function __init__."""
        # Setup DataPoint - Valid
        value = 1093454
        _key_ = 'testing'
        _metakey = '_{}'.format(_key_)
        timestamp = int(time.time() * 1000)
        data_type = DATA_INT
        variable = DataPoint(_key_, value, data_type=data_type)
        variable.add(DataPointMetadata(_metakey, _metakey))

        # Test each variable
        self.assertEqual(variable.data_type, data_type)
        self.assertEqual(variable.value, value)
        self.assertEqual(variable.key, _key_)
        self.assertTrue(variable.timestamp >= timestamp)
        self.assertEqual(len(variable.checksum), 64)
        self.assertEqual(
            variable.checksum, '''\
306353a04200e3b889b18c6f78dd8e56a63a287218ec8424e22d31b4b961a905''')
        self.assertEqual(variable.valid, True)

        # Setup DataPoint - Valid
        value = 1093454
        timestamp = 7
        _key_ = 'testing'
        _metakey = '_{}'.format(_key_)
        data_type = DATA_INT
        variable = DataPoint(_key_,
                             value,
                             data_type=data_type,
                             timestamp=timestamp)
        variable.add(DataPointMetadata(_metakey, _metakey))

        # Test each variable
        self.assertEqual(variable.data_type, data_type)
        self.assertEqual(variable.value, value)
        self.assertEqual(variable.key, _key_)
        self.assertEqual(variable.timestamp, timestamp)
        self.assertEqual(len(variable.checksum), 64)
        self.assertEqual(
            variable.checksum, '''\
306353a04200e3b889b18c6f78dd8e56a63a287218ec8424e22d31b4b961a905''')
        self.assertEqual(variable.valid, True)

        # Add metadata that should be ignored.
        for key in DATAPOINT_KEYS:
            variable.add(DataPointMetadata(key, '_{}_'.format(key)))
        variable.add(DataPointMetadata(_metakey, _metakey))

        # Test each variable (unchanged)
        self.assertEqual(variable.data_type, data_type)
        self.assertEqual(variable.value, value)
        self.assertEqual(variable.key, _key_)
        self.assertEqual(len(variable.checksum), 64)
        self.assertEqual(
            variable.checksum, '''\
306353a04200e3b889b18c6f78dd8e56a63a287218ec8424e22d31b4b961a905''')
        self.assertEqual(variable.valid, True)

        # Setup DataPoint - invalid data_type
        value = 1093454
        _key_ = 'testing'
        data_type = 123
        variable = DataPoint(_key_, value, data_type=data_type)

        # Test each variable
        self.assertEqual(variable.data_type, data_type)
        self.assertEqual(variable.value, value)
        self.assertEqual(variable.key, _key_)
        self.assertEqual(variable.valid, False)

        # Setup DataPoint - invalid value for numeric data_type
        value = '_123'
        _key_ = 'testing'
        data_type = DATA_INT
        variable = DataPoint(_key_, value, data_type=data_type)

        # Test each variable
        self.assertEqual(variable.data_type, data_type)
        self.assertEqual(variable.value, value)
        self.assertEqual(variable.key, _key_)
        self.assertEqual(variable.valid, False)

        # Setup DataPoint - valid value for integer data_type but
        # string for value
        value = '1093454'
        _key_ = 'testing'
        data_type = DATA_INT
        variable = DataPoint(_key_, value, data_type=data_type)

        # Test each variable
        self.assertEqual(variable.data_type, data_type)
        self.assertEqual(variable.value, int(value))
        self.assertEqual(variable.key, _key_)
        self.assertEqual(len(variable.checksum), 64)
        self.assertEqual(
            variable.checksum, '''\
7f99301d9be275b14af5626ffabe22a154415ed2ef7dad37f1707bd25b6afdc6''')
        self.assertEqual(variable.valid, True)

        # Setup DataPoint - valid value for int data_type but
        # string for value
        value = '1093454.3'
        _key_ = 'testing'
        data_type = DATA_INT
        variable = DataPoint(_key_, value, data_type=data_type)

        # Test each variable
        self.assertEqual(variable.data_type, data_type)
        self.assertEqual(variable.value, int(float(value)))
        self.assertEqual(variable.key, _key_)
        self.assertEqual(len(variable.checksum), 64)
        self.assertEqual(
            variable.checksum, '''\
7f99301d9be275b14af5626ffabe22a154415ed2ef7dad37f1707bd25b6afdc6''')
        self.assertEqual(variable.valid, True)

        # Setup DataPoint - valid value for float data_type but
        # string for value
        value = '1093454.3'
        _key_ = 'testing'
        data_type = DATA_FLOAT
        variable = DataPoint(_key_, value, data_type=data_type)

        # Test each variable
        self.assertEqual(variable.data_type, data_type)
        self.assertEqual(variable.value, float(value))
        self.assertEqual(variable.key, _key_)
        self.assertEqual(len(variable.checksum), 64)
        self.assertEqual(
            variable.checksum, '''\
ab48bdc902e2ea5476a54680a7ace0971ab90edb3f6ffe00a89b2d1e17b1548d''')
        self.assertEqual(variable.valid, True)

        # Setup DataPoint - valid value for str data_type
        for value in [0, 1, '1093454.3']:
            _key_ = 'testing'
            data_type = DATA_STRING
            variable = DataPoint(_key_, value, data_type=data_type)

            # Test each variable
            self.assertEqual(variable.data_type, data_type)
            self.assertEqual(variable.value, str(value))
            self.assertEqual(variable.key, _key_)
            self.assertEqual(len(variable.checksum), 64)
            self.assertEqual(
                variable.checksum, '''\
431111472993bf4d9b8b347476b79321fea8a337f3c1cb2fedaa185b54185540''')
            self.assertEqual(variable.valid, True)

        # Setup DataPoint - invalid value for str data_type
        for value in [True, False, None]:
            _key_ = 'testing'
            data_type = DATA_STRING
            variable = DataPoint(_key_, value, data_type=data_type)

            # Test each variable
            self.assertEqual(variable.data_type, data_type)
            self.assertEqual(variable.valid, False)
            self.assertEqual(variable.value, str(value))
            self.assertIsNone(variable.key)
            self.assertEqual(len(variable.checksum), 64)
            self.assertEqual(
                variable.checksum, '''\
a783370f88d8c54b5f5e6641af69d86dae5d4d62621d55cf7e63f6c66644c214''')
Example 23
    def stats_system(self):
        """Update agent with system data.

        Args:
            ddv: TargetDataPoints object

        Returns:
            result: List of DataPoint objects

        """
        #######################################################################
        # Set timeseries values (Integers)
        #######################################################################
        result = []

        result.append(
            DataPoint('process_count', len(psutil.pids()),
                      data_type=DATA_INT).add(self.metadata))

        # Load averages
        (la_01, la_05, la_15) = os.getloadavg()

        result.append(
            DataPoint('load_average_01min', la_01,
                      data_type=DATA_INT).add(self.metadata))

        result.append(
            DataPoint('load_average_05min', la_05,
                      data_type=DATA_INT).add(self.metadata))

        result.append(
            DataPoint('load_average_15min', la_15,
                      data_type=DATA_INT).add(self.metadata))

        #######################################################################
        # Set timeseries values (Floats)
        #######################################################################

        result.append(
            DataPoint('cpu_frequency',
                      psutil.cpu_freq().current,
                      data_type=DATA_FLOAT).add(self.metadata))

        #######################################################################
        # Set timeseries values (Named Tuples)
        #######################################################################

        # Percentage CPU utilization
        result.extend(
            _named_tuple_to_dv(psutil.cpu_times_percent(),
                               'cpu_times_percent',
                               data_type=DATA_FLOAT,
                               metadata=self.metadata))

        # Get CPU runtimes
        result.extend(
            _named_tuple_to_dv(psutil.cpu_times(),
                               'cpu_times',
                               data_type=DATA_COUNT64,
                               metadata=self.metadata))

        # Get CPU stats
        result.extend(
            _named_tuple_to_dv(psutil.cpu_stats(),
                               'cpu_stats',
                               data_type=DATA_COUNT64,
                               metadata=self.metadata))

        # Get memory utilization
        result.extend(
            _named_tuple_to_dv(psutil.virtual_memory(),
                               'memory',
                               data_type=DATA_INT,
                               metadata=self.metadata))

        # Return
        return result
Example 24
def _convert_results(inbound):
    """Convert results from easysnmp.variables.SNMPVariable to DataPoint.

    Args:
        inbound: SNMP query result as list of easysnmp.variables.SNMPVariable

    Returns:
        outbound: DataPoint formatted equivalent

    """
    # Initialize key variables
    outbound = []

    # Format the results to DataPoint format
    for item in inbound:
        # Initialize loop variables
        converted = None
        snmp_type = item.snmp_type
        data_type = DATA_INT

        # Convert string type values to bytes
        if snmp_type.upper() == 'OCTETSTR':
            converted = item.value
            data_type = DATA_STRING
        elif snmp_type.upper() == 'OPAQUE':
            converted = item.value
            data_type = DATA_STRING
        elif snmp_type.upper() == 'BITS':
            converted = item.value
            data_type = DATA_STRING
        elif snmp_type.upper() == 'IPADDR':
            converted = item.value
            data_type = DATA_STRING
        elif snmp_type.upper() == 'NETADDR':
            converted = item.value
            data_type = DATA_STRING
        elif snmp_type.upper() == 'OBJECTID':
            # DO NOT CHANGE !!!
            # converted = bytes(str(value), 'utf-8')
            converted = item.value
            data_type = DATA_STRING
        elif snmp_type.upper() == 'NOSUCHOBJECT':
            # Nothing if OID not found
            converted = None
            data_type = DATA_NONE
        elif snmp_type.upper() == 'NOSUCHINSTANCE':
            # Nothing if OID not found
            converted = None
            data_type = DATA_NONE
        elif snmp_type.upper() == 'ENDOFMIBVIEW':
            # Nothing
            converted = None
            data_type = DATA_NONE
        elif snmp_type.upper() == 'NULL':
            # Nothing
            converted = None
            data_type = DATA_NONE
        elif snmp_type.upper() == 'COUNTER':
            # Numeric values
            converted = int(item.value)
            data_type = DATA_COUNT
        elif snmp_type.upper() == 'COUNTER64':
            # Numeric values
            converted = int(item.value)
            data_type = DATA_COUNT64
        else:
            # Convert everything else into integer values
            # rfc1902.Integer
            # rfc1902.Integer32
            # rfc1902.Gauge32
            # rfc1902.Unsigned32
            # rfc1902.TimeTicks
            converted = int(item.value)

        # Convert result to DataPoint
        key = '{}.{}'.format(item.oid, item.oid_index)
        datapoint = DataPoint(key, converted, data_type=data_type)

        # Append to outbound result
        outbound.append(datapoint)

    # Return
    return outbound
Example 25
def main():
    """Post data to pattoo server.

    Args:
        None

    Returns:
        None

    """
    '''
    NOTE:

    Scripts must be run at regular intervals and the polling_interval
    should be automatically provided to the main() function.

    Notes about CRON:
    Using cron for polling is not advised unless you know the interval
    will not change. If you do use cron, set the polling_interval value
    below to match the cron interval in seconds.

    Ideally your agents should run as daemons, not as cron jobs. See the daemon
    example script which explains how to do this.
    '''

    # Define the polling interval in seconds (integer).
    polling_interval = 300

    # Give the agent a name
    agent_name = 'sample_agent_script'

    # Let's assume the script has already received this data from SITE_A
    site_a_data = [['ABC', 123.456], ['DEF', 456.789]]

    # Let's assume the script has already received this data from WORK_1
    work_1_data = [['GHI', 654.321], ['JKL', 987.654]]
    '''
    NOTE:

    The AgentPolledData object contains unique identification information
    that the pattoo server will use to differentiate the information source.
    This includes the hostname of the system on which the object was created
    and a unique hashed identifier stored in the agent's configured cache
    directory.

    You just need to write an agent to do one thing well, such as data
    collection from one type of target source. For example, you don't need to
    give each agent that does the same thing a different "agent_name". Just
    make sure that different types of agents have different "agent_name"
    values.

    The PattooShared library will take care of the rest.
    '''

    # Setup the agent's AgentPolledData object.
    agent = AgentPolledData(agent_name, polling_interval)
    '''
    NOTE:

    Metadata is normally expected to stay constant. If it changes then the
    datapoint ID changes and you'll start plotting a brand new chart on the
    pattoo server. The old chart will stop at the time the metadata changed.

    In some cases, you may not want changing metadata to cause a brand new
    plot. For example, your metadata for computer resource charting may include
    the operating system version. This is useful background information, but
    shouldn't impact charting if it changes. This type of metadata should be
    dynamic.

    '''
    # Let's add some metadata that you don't want to affect charting in the
    # event of a change. Department names change all the time.
    metadata_static = DataPointMetadata('Department Name',
                                        'The Palisadoes Foundation',
                                        update_checksum=False)

    # Let's add some metadata that will change and trigger a new chart.
    metadata_dynamic = DataPointMetadata('Financial Year', '2020')

    # Create target objects for SITE_A
    target = TargetDataPoints('SITE_A')
    for quote in site_a_data:
        key, value = quote
        '''
        NOTE:

        You don't have to specify the time when the data was collected.
        The DataPoint object captures that information automatically. You can
        also specify it using the timestamp= argument. See the class
        documentation for details.

        The default data_type is DATA_INT (integer). Read the documentation
        for the various other data types, which cover float and counter values.
        '''
        datapoint = DataPoint(key, value, data_type=DATA_FLOAT)
        datapoint.add(metadata_static)
        datapoint.add(metadata_dynamic)
        target.add(datapoint)
    agent.add(target)

    # Create target objects for WORK_1
    target = TargetDataPoints('WORK_1')
    for quote in work_1_data:
        key, value = quote
        datapoint = DataPoint(key, value, data_type=DATA_FLOAT)
        datapoint.add(metadata_static)
        datapoint.add(metadata_dynamic)
        target.add(datapoint)
    agent.add(target)

    # Post the data to pattoo
    post = PostAgent(agent)
    post.post()
Example 26
    def test_add(self):
        """Testing function add."""
        # Setup DataPoint - Valid
        value = 1093454
        _key_ = 'testing'
        data_type = DATA_INT
        variable = DataPoint(_key_, value, data_type=data_type)

        # Test adding
        for key, value in [(1, 2), (3, 4), (5, 6)]:
            metadata = DataPointMetadata(key, value)
            variable.add(metadata)
            self.assertEqual(variable.metadata[str(key)], str(value))

        self.assertEqual(len(variable.metadata), 3)
        self.assertEqual(len(variable.checksum), 64)
        self.assertEqual(
            variable.checksum, '''\
73ce7225ca1ea55f53c96991c9922a185cf695224b94f2051b8a853049ba1935''')

        # Test adding duplicates (no change)
        for key, value in [(1, 2), (3, 4), (5, 6)]:
            metadata = DataPointMetadata(key, value)
            variable.add(metadata)
            self.assertEqual(variable.metadata[str(key)], str(value))

        self.assertEqual(len(variable.metadata), 3)
        self.assertEqual(len(variable.checksum), 64)
        self.assertEqual(
            variable.checksum, '''\
73ce7225ca1ea55f53c96991c9922a185cf695224b94f2051b8a853049ba1935''')

        # Test adding now with update_checksum set to False. No change
        for key, value in [(10, 20), (30, 40), (50, 60)]:
            metadata = DataPointMetadata(key, value, update_checksum=False)
            variable.add(metadata)
            self.assertEqual(variable.metadata[str(key)], str(value))

        self.assertEqual(len(variable.metadata), 6)
        self.assertEqual(len(variable.checksum), 64)
        self.assertEqual(
            variable.checksum, '''\
73ce7225ca1ea55f53c96991c9922a185cf695224b94f2051b8a853049ba1935''')

        # Test adding now with update_checksum set to True. No change,
        # as they have been added already.
        for key, value in [(10, 20), (30, 40), (50, 60)]:
            metadata = DataPointMetadata(key, value, update_checksum=True)
            variable.add(metadata)
            self.assertEqual(variable.metadata[str(key)], str(value))

        self.assertEqual(len(variable.metadata), 6)
        self.assertEqual(len(variable.checksum), 64)
        self.assertEqual(
            variable.checksum, '''\
73ce7225ca1ea55f53c96991c9922a185cf695224b94f2051b8a853049ba1935''')

        # Test adding now with update_checksum set to True
        for key, value in [(11, 21), (31, 41), (51, 61)]:
            metadata = DataPointMetadata(key, value, update_checksum=True)
            variable.add(metadata)
            self.assertEqual(variable.metadata[str(key)], str(value))

        self.assertEqual(len(variable.metadata), 9)
        self.assertEqual(len(variable.checksum), 64)
        self.assertEqual(
            variable.checksum, '''\
2518ce8c9dc0683ef87a6a438c8c79c2ae3fd8ffd38032b6c1d253057d04c8f7''')
Example 27
def _create_datapoints(items):
    """Get PATOO_SNMP agent data.

    Update the TargetDataPoints with DataPoints

    Args:
        items: Dict of type SNMPVariable keyed by OID branch

    Returns:
        result: List of DataPoints with metadata added

    Method:
        1) Poll all desired OIDs from the target. Ignore administratively
            down interfaces
        2) Get the IfAlias, IfName, and ifDescr values for each snmp ifIndex
            to use as metadata for DataPoints
        3) Convert the polled datapoints to use their MIB string as the key
            instead of the OID. Store the OID as a metadata value.
        4) Add the IfAlias, IfName, and ifDescr values as metadata to
            each datapoint.

    """
    # Initialize key variables
    result = []
    ifindex_lookup = _metadata(items)

    # Process the results
    for key, polled_datapoints in items.items():
        # Ignore keys used to create the ifindex_lookup
        if key in ['ifDescr', 'ifName', 'ifAlias', 'ifIndex', 'ifAdminStatus']:
            continue

        # Evaluate DataPoint list data from remaining keys
        for polled_datapoint in polled_datapoints:
            if polled_datapoint.valid is False:
                continue

            # Reassign DataPoint values
            ifindex = polled_datapoint.key.split('.')[-1]
            if ifindex in ifindex_lookup:

                # Ignore administratively down interfaces
                if bool(ifindex_lookup[ifindex].ifadminstatus) is False:
                    continue

                # Create a new Datapoint keyed by MIB equivalent
                new_key = _key(polled_datapoint.key)
                datapoint = DataPoint(new_key,
                                      polled_datapoint.value,
                                      data_type=polled_datapoint.data_type)

                # Add metadata to the datapoint
                datapoint.add(DataPointMetadata('oid', polled_datapoint.key))
                if bool(ifindex_lookup[ifindex].ifdescr) is True:
                    datapoint.add(
                        DataPointMetadata('ifDescr',
                                          ifindex_lookup[ifindex].ifdescr))
                if bool(ifindex_lookup[ifindex].ifname) is True:
                    datapoint.add(
                        DataPointMetadata('ifName',
                                          ifindex_lookup[ifindex].ifname))

                # Add metadata to the datapoint (Don't update checksum as this
                # value may change over time via configuration)
                if bool(ifindex_lookup[ifindex].ifalias) is True:
                    datapoint.add(
                        DataPointMetadata('ifAlias',
                                          ifindex_lookup[ifindex].ifalias,
                                          update_checksum=False))

                result.append(datapoint)

    return result