def test_get_response(self, mock_socket):
    mock_socket.recv.side_effect = (self.resp_header, self.resp_body)
    zs = ZabbixSender()
    result = zs._get_response(mock_socket)
    mock_socket.recv.assert_has_calls([call(92)])
    self.assertEqual(result['response'], 'success')
def test_get_response_fail_s_close(self, mock_socket):
    mock_socket.recv.side_effect = (b'IDDQD', self.resp_body)
    mock_socket.close.side_effect = Exception
    zs = ZabbixSender()
    result = zs._get_response(mock_socket)
    self.assertFalse(result)
def test_create_messages(self):
    m = [ZabbixMetric('host1', 'key1', 1),
         ZabbixMetric('host2', 'key2', 2)]
    zs = ZabbixSender()
    result = zs._create_messages(m)
    self.assertIsInstance(result, list)
    self.assertEqual(len(result), 2)
def configure(self, configuration):
    BaseThreadedModule.configure(self, configuration)
    self.hostname = self.getConfigurationValue("hostname")
    self.fields = self.getConfigurationValue("fields")
    self.field_prefix = self.getConfigurationValue("field_prefix")
    self.timestamp_field = self.getConfigurationValue("timestamp_field")
    self.batch_size = self.getConfigurationValue('batch_size')
    self.backlog_size = self.getConfigurationValue('backlog_size')
    self.agent_conf = self.getConfigurationValue("agent_conf")
    if self.agent_conf:
        if self.agent_conf is True:
            self.agent_conf = "/etc/zabbix/zabbix_agentd.conf"
        if not os.path.isfile(self.agent_conf):
            self.logger.error("%s does not point to an existing file." % self.agent_conf)
            self.lumbermill.shutDown()
        self.zabbix_sender = ZabbixSender(use_config=self.agent_conf)
    else:
        server = self.getConfigurationValue("server")
        port = 10051
        if ":" in server:
            server, port = server.split(":")
        self.zabbix_sender = ZabbixSender(zabbix_server=server, zabbix_port=int(port))
    self.buffer = Buffer(self.getConfigurationValue('batch_size'), self.storeData,
                         self.getConfigurationValue('store_interval_in_secs'),
                         maxsize=self.getConfigurationValue('backlog_size'))
def test_send_sendall_exception(self, mock_socket):
    mock_socket.return_value = mock_socket
    mock_socket.sendall.side_effect = Exception
    zm = ZabbixMetric('host1', 'key1', 100500, 1457358608)
    zs = ZabbixSender()
    with self.assertRaises(Exception):
        zs.send([zm])
def test_recive(self, mock_socket):
    mock_data = b'\x01\\\x00\x00\x00\x00\x00\x00\x00'
    mock_socket.recv.side_effect = (False, b'ZBXD', mock_data)
    zs = ZabbixSender()
    result = zs._receive(mock_socket, 13)
    self.assertEqual(result, b'ZBXD' + mock_data)
    self.assertEqual(mock_socket.recv.call_count, 3)
    mock_socket.recv.assert_has_calls([call(13), call(13), call(9)])
def test_send(self, mock_socket):
    mock_data = b'\x01\\\x00\x00\x00\x00\x00\x00\x00'
    mock_socket.return_value = mock_socket
    mock_socket.recv.side_effect = (b'ZBXD', mock_data, self.resp_body)
    zm = ZabbixMetric('host1', 'key1', 100500, 1457358608)
    zs = ZabbixSender()
    result = zs.send([zm])
    self.assertTrue(result)
def test_create_request_failed(self):
    message = [
        '{"clock": "1457445366", "host: \
            "host1", "value": "1", "key": "key1"}',
        '{"clock": "1457445366", "host": \
            "host2", "value": "2", "key": "key2"}']
    zs = ZabbixSender()
    result = zs._create_request(message)
    with self.assertRaises(Exception):
        result = json.loads(result.decode())
def test_create_request(self):
    message = [
        '{"clock": "1457445366", "host": "host1",\
            "value": "1", "key": "key1"}',
        '{"clock": "1457445366", "host": "host2",\
            "value": "2", "key": "key2"}']
    zs = ZabbixSender()
    result = zs._create_request(message)
    self.assertIsInstance(result, bytes)
    result = json.loads(result.decode())
    self.assertEqual(result['request'], 'sender data')
    self.assertEqual(len(result['data']), 2)
def test_send(self, mock_socket):
    mock_data = b'\x01\\\x00\x00\x00\x00\x00\x00\x00'
    mock_socket.return_value = mock_socket
    mock_socket.recv.side_effect = (b'ZBXD', mock_data, self.resp_body)
    zm = ZabbixMetric('host1', 'key1', 100500, 1457358608)
    zs = ZabbixSender()
    result = zs.send([zm])
    self.assertIsInstance(result, ZabbixResponse)
    self.assertEqual(result.chunk, 1)
    self.assertEqual(result.total, 10)
    self.assertEqual(result.failed, 10)
def test_send_failed(self, mock_socket):
    mock_data = b'\x01\\\x00\x00\x00\x00\x00\x00\x00'
    mock_socket.return_value = mock_socket
    mock_socket.recv.side_effect = (b'ZBXD', mock_data, b'''
        {"response": "suces","info":"processed: 0; failed: \
        10; total: 10; seconds spent: 0.000078"}
        ''')
    zm = ZabbixMetric('host1', 'key1', 100500, 1457358608)
    zs = ZabbixSender()
    with self.assertRaises(Exception):
        zs.send([zm])
def test_create_packet(self):
    message = [
        '{"clock": "1457445366", "host": "host1",\
            "value": "1", "key": "key1"}',
        '{"clock": "1457445366", "host": "host2",\
            "value": "2", "key": "key2"}']
    zs = ZabbixSender()
    request = zs._create_request(message)
    result = zs._create_packet(request)
    data_len = struct.pack('<Q', len(request))
    self.assertEqual(result[5:13], data_len)
    self.assertEqual(result[:13], b'ZBXD\x01\xc4\x00\x00\x00\x00\x00\x00\x00')
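# A minimal sketch of the wire format that test_create_request and
# test_create_packet above pin down (an illustration, not the library's own
# code): the b'ZBXD' signature, a protocol version byte 0x01, an 8-byte
# little-endian payload length, then the JSON "sender data" request.
import struct

def build_sender_packet(serialized_metrics):
    payload = ('{"request":"sender data","data":['
               + ','.join(serialized_metrics) + ']}').encode('utf-8')
    return b'ZBXD\x01' + struct.pack('<Q', len(payload)) + payload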
def trapper(items_raw):
    if dict_setup["metric_sent_protocol"].lower() == "zabbix":
        hostname = dict_setup["metric_sent_hostname"].lower()
        zabbix_server = dict_setup["metric_sent_server"].lower()
        try:
            timestamp = items_raw['timestamp']
            metrics = []
            zbx = ZabbixSender(zabbix_server)
            for metric in items_raw:
                if metric != "timestamp":
                    m = ZabbixMetric(host=hostname, key=metric,
                                     value=items_raw[metric], clock=timestamp)
                    metrics.append(m)
            returapi = zbx.send(metrics)
            logging.info("{}: {}".format(inspect.stack()[1][3], returapi))
            return True
        except Exception as e:
            logging.error("Trap to zabbix error: {} - {}".format(inspect.stack()[1][3], e))
            return False
    else:
        return False
def on_message(client, userdata, msg):
    packet = [
        ZabbixMetric(zabbix_item_host, zabbix_item_name, str(msg.payload)),
    ]
    sender = ZabbixSender(zabbix_server=zabbix_host, zabbix_port=10051,
                          use_config=None)
    sender.send(packet)
response = subprocess.Popen(
    '/usr/local/bin/speedtest-cli --simple',
    shell=True, stdout=subprocess.PIPE).stdout.read().decode('utf-8')

#ping = re.findall('Ping:\s(.*?)\s', response, re.MULTILINE)
download = re.findall('Download:\s(.*?)\s', response, re.MULTILINE)
upload = re.findall('Upload:\s(.*?)\s', response, re.MULTILINE)
#ping = ping[0].replace(',', '.')
download = download[0].replace(',', '.')
upload = upload[0].replace(',', '.')

pacote = [
    ZabbixMetric('127.0.0.1', 'nettestd', download),
    ZabbixMetric('127.0.0.1', 'nettestu', upload)
]

resultado = ZabbixSender(use_config=True).send(pacote)
print(resultado)

teste = literal_eval(str(resultado))
qtdErros = int(teste["failed"])
if qtdErros > 0:
    mensagem = "Ocorreu um erro ao enviar as chaves nettestd e nettestu para o zabbix via zabbix_sender!%0A" + str(resultado)
    response = requests.get("https://api.telegram.org/bot" + bot_token +
                            "/sendMessage?text=" + mensagem + "&chat_id=" + chat_id)
class Station: def __init__(self): self.Devices = [] self.modbusTypesLength = { 'int': 1, 'uint': 1, 'dint': 2, 'duint': 2, 'real': 2, 'float': 2, 'word': 1, 'dword': 2 } def addZabbixReceiver(self, ZabbixHostAddress, ZabbixHostPort=10051): self.ZabbixHost = ZabbixSender(ZabbixHostAddress, ZabbixHostPort) def addDevice(self, deviceType, deviceName, deviceAddress): Device = DeviceTemplate(deviceName, deviceAddress) Device.addVariablesList(deviceType) self.Devices.append(Device) def __sendToZabbix(self, deviceName, metricName, metricValue): if hasattr(self, 'ZabbixHost'): #print(deviceName,metricName,metricValue) metrics = [] metric = ZabbixMetric(deviceName, metricName, metricValue) metrics.append(metric) result = self.ZabbixHost.send(metrics) def runCycleInstance(self): #self.modbusClient = ModbusTcpClient(self.ipAddress,self.ipPort) if self.modbusClient.connect(): for runnedDevice in self.Devices: self.__processVariable(runnedDevice) self.modbusClient.close() def __processVariable(self, device): lastPoll = 0 variablesCount = device.getVariablesCount() onePollPart = 100 / variablesCount for i in range(variablesCount): try: varibaleValue = self.__readVariableFromModbus( device.getModbusAddress(), device.getVariableModbusAddress(i), device.getVariableType(i), device.getByteIndian(i), device.getWordIndian(i)) device.setVariableValue(i, varibaleValue) lastPoll = lastPoll + onePollPart self.__sendToZabbix(device.getZabbixName(), device.getVariableName(i), device.getVariableValue(i)) #print(device.getVariableValue(i)) except: print('Error reading register:', device.getModbusAddress(), device.getVariableName(i)) self.__sendToZabbix(device.getZabbixName(), 'LASTPOLL', lastPoll) def __readVariableFromModbus(self, modbusAddress, modbusVariableAddress, type, byteIndian, wordIndian): if (byteIndian == 'Big'): byteorder = Endian.Big else: byteorder = Endian.Little if (wordIndian == 'Big'): wordorder = Endian.Big else: wordorder = Endian.Little rr = self.modbusClient.read_holding_registers( modbusVariableAddress, self.modbusTypesLength[type], unit=modbusAddress) if (not rr.isError()): responseValue = self.__convertModbusResponseValue( type, rr, byteorder, wordorder) #print (responseValue) return responseValue def __convertModbusResponseValue(self, type, response, byteorder, wordorder): decoder = BinaryPayloadDecoder.fromRegisters(response.registers, byteorder=byteorder, wordorder=wordorder) if (type == 'int'): decoded = OrderedDict([ ('16int', decoder.decode_16bit_int()), ]) if (type == 'uint'): decoded = OrderedDict([ ('16uint', decoder.decode_16bit_uint()), ]) if (type == 'dint'): decoded = OrderedDict([('32int', decoder.decode_32bit_int())], ) if (type == 'duint'): decoded = OrderedDict([ ('32uint', decoder.decode_32bit_uint()), ]) for name, value in iteritems(decoded): decodedValue = value return decodedValue
class Main(Checks): def __init__(self): parser = argparse.ArgumentParser() parser.add_argument( '--address', required=True, help="Oracle database address") parser.add_argument( '--database', required=True, help="Oracle database SID") parser.add_argument('--username', help="Oracle database user") parser.add_argument( '--password', help="Oracle database user's password") parser.add_argument( '--port', default=1521, help="Oracle database port") parser.add_argument( '--ora1000', action='store_true', help="reconnect to Oracle database when request tablespace's size (bug 17897511)" ) parser.add_argument( '--verbose', '-v', action='store_true', help="Additional verbose information") self.args = parser.parse_args() if self.args.username is None: self.args.username = pyora_config.username if self.args.password is None: self.args.password = pyora_config.password def db_connect(self): dsn = cx_Oracle.makedsn(self.args.address, self.args.port, self.args.database) self.pool = cx_Oracle.SessionPool( user=self.args.username, password=self.args.password, dsn=dsn, min=1, max=3, increment=1) self.db = self.pool.acquire() self.cur = self.db.cursor() def db_close(self): self.cur.close() self.pool.release(self.db) def __call__(self): try: self.db_connect() except Exception, err: print str(err) return 1 Data = [] try: with open("/usr/lib/zabbix/cache/items-" + self.args.address + "-" + self.args.database + ".list") as keylist: for key in keylist: key = key.split(',') hostname = key[0] keyname = key[1].rstrip('\n') key = keyname.split('[') # Oracle bug 17897511 (ORA-1000 from query on DBA_TABLESPACE_USAGE_METRICS) if (key[0] == "tablespace" or key[0] == "tablespace_abs") and self.args.ora1000: self.db_close() self.db_connect() if len(key) > 1: key[1] = key[1].rstrip(']') if self.args.verbose: print "Processing: " + key[0] + ": " + key[1] value = getattr(Checks, key[0])(self, key[1]) if self.args.verbose: print "\t\t\t" + str(value) else: if self.args.verbose: print "Processing: " + key[0] value = getattr(Checks, key[0])(self) if self.args.verbose: print "\t\t\t" + str(value) Data.append(ZabbixMetric(hostname, keyname, value)) if self.args.verbose: print "Data to send:" print Data result = ZabbixSender(use_config=True).send(Data) print result print "\n" Data = [] if not self.args.verbose: result = ZabbixSender(use_config=True).send(Data) print result Data = [] Data.append( ZabbixMetric(hostname, "failedchecks", result.failed)) result = ZabbixSender(use_config=True).send(Data) print result except IOError, err: print str(err)
def alert(self, matches): zbx_host = self.zbx_host.strip() # Matches is a list of match dictionaries. # It contains more than one match when the alert has # the aggregation option set zm = [] ts_epoch = None for match in matches: if zbx_host.startswith('{{') and zbx_host.endswith('}}'): host_field = zbx_host[2:-2].strip() zbx_host = find_value(match, host_field.split('.')) if not zbx_host: elastalert_logger.error( f"Missing host field '%s' for dynamic host, alert will be discarded" % host_field) return if ':' not in match[self.timestamp_field] or '-' not in match[ self.timestamp_field]: ts_epoch = int(match[self.timestamp_field]) else: try: ts_epoch = int( datetime.strptime( match[self.timestamp_field], self.timestamp_strptime).strftime('%s')) except ValueError: ts_epoch = int( datetime.strptime(match[self.timestamp_field], '%Y-%m-%dT%H:%M:%SZ').strftime('%s')) zm.append( ZabbixMetric(host=zbx_host, key=self.zbx_key, value='1', clock=ts_epoch)) try: if self.extra_data: extra_data = [] for match in matches: for related_event in match.get('related_events', []): extra_data.append( get_fields(related_event, self.extra_data['fields'])) extra_data.append( get_fields(match, self.extra_data['fields'])) data = { 'template': self.extra_data['template'], 'data': extra_data } object_name = self.minio_client.upload_random_object( bucket_name=self.minio_bucket, data=json.dumps(data, indent=2)) if object_name: zm.append( ZabbixMetric(host=zbx_host, key=self.zbx_key_minio_data, value=object_name, clock=ts_epoch - 1)) else: elastalert_logger.warning( "Data couldn't be uploaded to MinIO, it won't be provided with the alert" ) zm.append( ZabbixMetric(host=zbx_host, key=self.zbx_key_minio_data, value='MinIO data upload failed', clock=ts_epoch - 1)) response = ZabbixSender(zabbix_server=self.zbx_sender_host, zabbix_port=self.zbx_sender_port).send(zm) if response.failed: elastalert_logger.warning( "Missing zabbix host '%s' or host's item '%s', alert will be discarded" % (zbx_host, self.zbx_key)) else: elastalert_logger.info("Alert sent to Zabbix") except Exception as e: raise EAException("Error sending alert to Zabbix: %s" % e)
def node_stats(metric): node_stats = es.nodes.stats(node_id='_local', request_timeout=60) node_id = node_stats['nodes'].keys()[0] total_merge = node_stats['nodes'][node_id]['indices']['merges'][ 'total_size_in_bytes'] total_field = node_stats['nodes'][node_id]['indices']['fielddata'][ 'memory_size_in_bytes'] search_query_total = node_stats['nodes'][node_id]['indices']['search'][ 'query_total'] docs_count = node_stats['nodes'][node_id]['indices']['docs']['count'] docs_deleted = node_stats['nodes'][node_id]['indices']['docs']['deleted'] thread_bulk_rejected = node_stats['nodes'][node_id]['thread_pool']['bulk'][ 'rejected'] thread_bulk_completed = node_stats['nodes'][node_id]['thread_pool'][ 'bulk']['completed'] thread_get_rejected = node_stats['nodes'][node_id]['thread_pool']['get'][ 'rejected'] thread_get_completed = node_stats['nodes'][node_id]['thread_pool']['get'][ 'completed'] thread_index_completed = node_stats['nodes'][node_id]['thread_pool'][ 'index']['completed'] thread_listener_completed = node_stats['nodes'][node_id]['thread_pool'][ 'listener']['completed'] #thread_percolate_completed = node_stats['nodes'][node_id]['thread_pool']['percolate']['completed'] thread_refresh_completed = node_stats['nodes'][node_id]['thread_pool'][ 'refresh']['completed'] thread_search_completed = node_stats['nodes'][node_id]['thread_pool'][ 'search']['completed'] thread_snapshot_completed = node_stats['nodes'][node_id]['thread_pool'][ 'snapshot']['completed'] #thread_suggest_completed = node_stats['nodes'][node_id]['thread_pool']['suggest']['completed'] thread_warmer_completed = node_stats['nodes'][node_id]['thread_pool'][ 'warmer']['completed'] metrics = [ ZabbixMetric(sys.argv[3], 'es.node[total_merges_mem]', total_merge), ZabbixMetric(sys.argv[3], 'es.node[total_field_data_mem]', total_field), ZabbixMetric(sys.argv[3], 'es.node[search_query_total]', search_query_total), ZabbixMetric(sys.argv[3], 'es.node[docs_count]', docs_count), ZabbixMetric(sys.argv[3], 'es.node[docs_deleted]', docs_deleted), ZabbixMetric(sys.argv[3], 'es.node[thread_bulk_rejected]', thread_bulk_rejected), ZabbixMetric(sys.argv[3], 'es.node[thread_bulk_completed]', thread_bulk_completed), ZabbixMetric(sys.argv[3], 'es.node[thread_get_rejected]', thread_get_rejected), ZabbixMetric(sys.argv[3], 'es.node[thread_get_completed]', thread_get_completed), ZabbixMetric(sys.argv[3], 'es.node[thread_index_completed]', thread_index_completed), ZabbixMetric(sys.argv[3], 'es.node[thread_listener_completed]', thread_listener_completed), #ZabbixMetric(sys.argv[3], 'es.node[thread_percolate_completed]', thread_percolate_completed), ZabbixMetric(sys.argv[3], 'es.node[thread_refresh_completed]', thread_refresh_completed), ZabbixMetric(sys.argv[3], 'es.node[thread_search_completed]', thread_search_completed), ZabbixMetric(sys.argv[3], 'es.node[thread_snapshot_completed]', thread_snapshot_completed), #ZabbixMetric(sys.argv[3], 'es.node[thread_suggest_completed]', thread_suggest_completed), ZabbixMetric(sys.argv[3], 'es.node[thread_warmer_completed]', thread_warmer_completed), ] ZabbixSender(use_config=True).send(metrics)
def pure_array_info(): try: '''Get the argument from Zabbix''' ip = str(sys.argv[2]) #IP of the Pure Storage Array token = str(sys.argv[3]) #API Token host = str(sys.argv[4]) #Host name (for the sender) zabbixIP = str(sys.argv[5]) #Zabbix Proxy or Server IP (for the sender) '''Get data''' arrayConnect = purestorage.FlashArray(ip,api_token=token,verify_https=False) arraySpace = arrayConnect.get(space="true") arrayInfo = arrayConnect.get() arrayPhoneHome = arrayConnect.get_phonehome() arrayRemoteAssist = arrayConnect.get_remote_assist_status() arrayValues = arraySpace[0] '''Will disable the output to console''' FNULL = open(os.devnull, 'w') '''Sending data''' metrics = [] if "capacity" in arrayValues: arrayCapacity = str(arrayValues["capacity"]) m = ZabbixMetric(host,'pure.array.capacity',arrayCapacity) metrics.append(m) if "volumes" in arrayValues: arrayVolumesSize = str(arrayValues["volumes"]) m = ZabbixMetric(host,'pure.array.volumes.size',arrayVolumesSize) metrics.append(m) if "data_reduction" in arrayValues: arrayDataReduction = str(arrayValues["data_reduction"]) m = ZabbixMetric(host,'pure.array.data.reduction',arrayDataReduction) metrics.append(m) if "total" in arrayValues: arrayUsedSpace = str(arrayValues["total"]) m = ZabbixMetric(host,'pure.array.used.space',arrayUsedSpace) metrics.append(m) if "shared_space" in arrayValues: arraySharedSpace = str(arrayValues["shared_space"]) m = ZabbixMetric(host,'pure.array.shared.space',arraySharedSpace) metrics.append(m) if "thin_provisioning" in arrayValues: arrayThinProvisioning = str(arrayValues["thin_provisioning"]) m = ZabbixMetric(host,'pure.array.thin.provisioning',arrayThinProvisioning) metrics.append(m) if "total_reduction" in arrayValues: arrayTotalReduction = str(arrayValues["total_reduction"]) m = ZabbixMetric(host,'pure.array.total.data.reduction',arrayTotalReduction) metrics.append(m) if "array_name" in arrayInfo: arrayHostname = arrayInfo["array_name"] m = ZabbixMetric(host,'pure.array.hostname',arrayHostname) metrics.append(m) if "version" in arrayInfo: arrayVersion = str(arrayInfo["version"]) m = ZabbixMetric(host,'pure.array.version',arrayVersion) metrics.append(m) if "status" in arrayRemoteAssist: remoteAssist = arrayRemoteAssist["status"] m = ZabbixMetric(host,'pure.remote.assist',remoteAssist) metrics.append(m) if "phonehome" in arrayPhoneHome: phoneHome = arrayPhoneHome["phonehome"] m = ZabbixMetric(host,'pure.phone.home',phoneHome) metrics.append(m) data = ZabbixSender(zabbixIP) data.send(metrics) '''Send 1 to give a result to Zabbix''' print(1) except Exception as e: ''' Sending 0 to Zabbix instead of a Python error. Like that the items won't be considered as "unsupported" ''' metrics = [ZabbixMetric(host,'pure.info.launcher.error',str(e))] data = ZabbixSender(zabbixIP) data.send(metrics) print(0)
def pure_array_monitoring(): try: '''Get the argument from Zabbix''' ip = str(sys.argv[2]) #IP of the Pure Storage Array token = str(sys.argv[3]) #API Token host = str(sys.argv[4]) #Host name (for the sender) zabbixIP = str(sys.argv[5]) #Zabbix Proxy or Server IP (for the sender) '''Get data''' arrayConnect = purestorage.FlashArray(ip,api_token=token,verify_https=False) arrayMonitoring = arrayConnect.get(action="monitor") arrayValues = arrayMonitoring[0] '''Will disable the output to console''' FNULL = open(os.devnull, 'w') '''Send data''' metrics = [] if "input_per_sec" in arrayValues: arrayInputPerSec = str(arrayValues["input_per_sec"]) m = ZabbixMetric(host,"pure.array.input.per.second",arrayInputPerSec) metrics.append(m) if "output_per_sec" in arrayValues: arrayOutputPerSec = str(arrayValues["output_per_sec"]) m = ZabbixMetric(host,"pure.array.output.per.second",arrayOutputPerSec) metrics.append(m) if "queue_depth" in arrayValues: arrayQueueDepth = str(arrayValues["queue_depth"]) m = ZabbixMetric(host,"pure.array.queue.depth",arrayQueueDepth) metrics.append(m) if "reads_per_sec" in arrayValues: arrayReadPerSec = str(arrayValues["reads_per_sec"]) m = ZabbixMetric(host,"pure.array.read.per.sec",arrayReadPerSec) metrics.append(m) if "san_usec_per_read_op" in arrayValues: arraySanUsecPerReadOp = str(arrayValues["san_usec_per_read_op"]) m = ZabbixMetric(host,"pure.array.san.usec.per.read",arraySanUsecPerReadOp) metrics.append(m) if "san_usec_per_write_op" in arrayValues: arraySanUsecPerWriteOp = str(arrayValues["san_usec_per_write_op"]) m = ZabbixMetric(host,"pure.array.san.usec.per.write",arraySanUsecPerWriteOp) metrics.append(m) if "usec_per_read_op" in arrayValues: arrayUsecPerReadOp = str(arrayValues["usec_per_read_op"]) m = ZabbixMetric(host,"pure.array.usec.per.read",arrayUsecPerReadOp) metrics.append(m) if "usec_per_write_op" in arrayValues: arrayUsecPerWriteOp = str(arrayValues["usec_per_write_op"]) m = ZabbixMetric(host,"pure.array.usec.per.write",arrayUsecPerWriteOp) metrics.append(m) if "writes_per_sec" in arrayValues: arrayWritePerSec = str(arrayValues["writes_per_sec"]) m = ZabbixMetric(host,"pure.array.write.per.sec",arrayWritePerSec) metrics.append(m) data = ZabbixSender(zabbixIP) data.send(metrics) '''Send 1 to give a result to Zabbix''' print(1) except Exception as e: ''' Sending 0 to Zabbix instead of a Python error Like that the items won't be considered as unsupported ''' metrics = [ZabbixMetric(host,"pure.monitoring.launcher.error",str(e))] data = ZabbixSender(zabbixIP) data.send(metrics) print(0)
def __call__(self): try: self.db_connect() except Exception as err: print(str(err)) return 1 Data = [] try: with open("/usr/lib/zabbix/cache/items-" + self.args.address + "-" + self.args.database + ".list") as keylist: for key in keylist: key = key.split(',') hostname = key[0] keyname = key[1].rstrip('\n') key = keyname.split('[') # Oracle bug 17897511 (ORA-1000 from query on DBA_TABLESPACE_USAGE_METRICS) if (key[0] == "tablespace" or key[0] == "tablespace_abs") and self.args.ora1000: self.db_close() self.db_connect() if len(key) > 1: key[1] = key[1].rstrip(']') if self.args.verbose: print("Processing: " + key[0] + ": " + key[1]) value = getattr(Checks, key[0])(self, key[1]) if self.args.verbose: print("\t\t\t" + str(value)) else: if self.args.verbose: print("Processing: " + key[0]) value = getattr(Checks, key[0])(self) if self.args.verbose: print("\t\t\t" + str(value)) Data.append(ZabbixMetric(hostname, keyname, value)) if self.args.verbose: print("Data to send:") print(Data) result = ZabbixSender(use_config=True).send(Data) print(result) print("\n") Data = [] if not self.args.verbose: result = ZabbixSender(use_config=True).send(Data) print(result) Data = [] Data.append( ZabbixMetric(hostname, "failedchecks", result.failed)) result = ZabbixSender(use_config=True).send(Data) print(result) except IOError as err: print(str(err)) finally: self.db_close()
def zabbix_send_to_trapper(zabbix_server, host, key, value):
    metrics = [ZabbixMetric(host, key, value)]
    result_sent = ZabbixSender(zabbix_server=zabbix_server,
                               zabbix_port=10051).send(metrics)
    print("ZabbixSender status: ", result_sent)
    return result_sent
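# A hypothetical invocation of the helper above; the server address, host name
# and item key are placeholders.
if __name__ == "__main__":
    zabbix_send_to_trapper("zabbix.example.com", "web01", "app.heartbeat", 1)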
def setup(opp, config): """Set up the Zabbix component.""" conf = config[DOMAIN] protocol = "https" if conf[CONF_SSL] else "http" url = urljoin(f"{protocol}://{conf[CONF_HOST]}", conf[CONF_PATH]) username = conf.get(CONF_USERNAME) password = conf.get(CONF_PASSWORD) publish_states_host = conf.get(CONF_PUBLISH_STATES_HOST) entities_filter = convert_include_exclude_filter(conf) try: zapi = ZabbixAPI(url=url, user=username, password=password) _LOGGER.info("Connected to Zabbix API Version %s", zapi.api_version()) except ZabbixAPIException as login_exception: _LOGGER.error("Unable to login to the Zabbix API: %s", login_exception) return False except HTTPError as http_error: _LOGGER.error("HTTPError when connecting to Zabbix API: %s", http_error) zapi = None _LOGGER.error(RETRY_MESSAGE, http_error) event_helper.call_later(opp, RETRY_INTERVAL, lambda _: setup(opp, config)) return True opp.data[DOMAIN] = zapi def event_to_metrics(event, float_keys, string_keys): """Add an event to the outgoing Zabbix list.""" state = event.data.get("new_state") if state is None or state.state in (STATE_UNKNOWN, "", STATE_UNAVAILABLE): return entity_id = state.entity_id if not entities_filter(entity_id): return floats = {} strings = {} try: _state_as_value = float(state.state) floats[entity_id] = _state_as_value except ValueError: try: _state_as_value = float(state_helper.state_as_number(state)) floats[entity_id] = _state_as_value except ValueError: strings[entity_id] = state.state for key, value in state.attributes.items(): # For each value we try to cast it as float # But if we can not do it we store the value # as string attribute_id = f"{entity_id}/{key}" try: float_value = float(value) except (ValueError, TypeError): float_value = None if float_value is None or not math.isfinite(float_value): strings[attribute_id] = str(value) else: floats[attribute_id] = float_value metrics = [] float_keys_count = len(float_keys) float_keys.update(floats) if len(float_keys) != float_keys_count: floats_discovery = [] for float_key in float_keys: floats_discovery.append({"{#KEY}": float_key}) metric = ZabbixMetric( publish_states_host, "openpeerpower.floats_discovery", json.dumps(floats_discovery), ) metrics.append(metric) for key, value in floats.items(): metric = ZabbixMetric(publish_states_host, f"openpeerpower.float[{key}]", value) metrics.append(metric) string_keys.update(strings) return metrics if publish_states_host: zabbix_sender = ZabbixSender(zabbix_server=conf[CONF_HOST]) instance = ZabbixThread(opp, zabbix_sender, event_to_metrics) instance.setup(opp) return True
import os
import sys
import json
import whois
from pyzabbix import ZabbixMetric, ZabbixSender

wwwhost = sys.argv[1]
dir_path = os.path.dirname(os.path.realpath(__file__))

with open(dir_path + '/whois/' + wwwhost + '.json', 'r') as read_file:
    domains = json.load(read_file)

for domain in domains['domains']:
    try:
        w = whois.whois(domain)
        if isinstance(w.expiration_date, list):
            packet = ZabbixMetric(wwwhost, 'domain.expiry[' + domain + ']',
                                  w.expiration_date[0].timestamp()),
        else:
            if w.expiration_date:
                packet = ZabbixMetric(wwwhost, 'domain.expiry[' + domain + ']',
                                      w.expiration_date.timestamp()),
        result = ZabbixSender(use_config=True).send(packet)
    except:
        print("Error occurred while executing whois() for %s." % domain)

## some tests
#print(w)
#print(w.domain_name, w.expiration_date)
#print(packet)
#print(result)
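# Illustrative layout of the whois/<host>.json file the loop above expects;
# the domain names are placeholders:
#
# {
#     "domains": ["example.com", "example.org"]
# }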
def main(): """main I guess""" host_list = get_hosts(HOSTS_FILE) all_openstack_instances = [] p = Pool(min(MAX_PROCESSES, len(host_list))) custom_wrapper = functools.partial( PyZabbixPSKSocketWrapper, identity=PSK_IDENTITY, psk=bytes(bytearray.fromhex(PSK))) zabbix_sender = ZabbixSender( zabbix_server=ZABBIX_SERVER, socket_wrapper=custom_wrapper, timeout=30) custom_process_host = functools.partial( process_host, zabbix_sender=zabbix_sender) results = filter(None, p.map(custom_process_host, host_list)) print("Processed all host") for result in results: all_openstack_instances.extend(result) with ZabbixConnection(USER, "https://" + ZABBIX_SERVER, PASSWORD) as zapi: openstack_group_id = zapi.get_group_id(GROUP_NAME) all_zabbix_hosts = zapi.get_all_hosts([openstack_group_id]) hosts_not_in_openstack = list( set(all_zabbix_hosts) - set(all_openstack_instances)) p = Pool(min(MAX_PROCESSES, len(hosts_not_in_openstack))) lockfile = "/tmp/openstack-monitoring.lockfile" if os.path.exists(lockfile): main_logger.info("lockfile exists, quitting") sys.exit(0) # only execute once/twice every hour. if not (10 < datetime.now().minute < 17): main_logger.info("not the right time to cleanup, quitting") sys.exit(0) open(lockfile, "w").close() try: main_logger.info("Starting cleanup tasks") results = filter(None, p.map(cleanup_host, hosts_not_in_openstack)) print("Clean up processes finished") # FIXME: the list comprehensions are really slow, since we # iterate over a lot of items. hosts_to_be_deleted = [result["host_id"] for result in results if result["action"] == "delete"] hosts_to_be_disabled = [result["host_id"] for result in results if result["action"] == "disable"] if hosts_to_be_disabled != []: zapi.set_hosts_status(hosts_to_be_disabled, DISABLE_HOST) if hosts_to_be_deleted != []: zapi.delete_hosts(hosts_to_be_deleted) print("Hosts not in openstack:" + str(len(hosts_not_in_openstack))) print("hosts_disabled:" + str(len(hosts_to_be_disabled))) print("hosts_deleted:" + str(len(hosts_to_be_deleted))) finally: os.remove(lockfile)
def main(fs_config=None, sleep_seconds=None): # read configuration config = default_config() if fs_config is not None: fs_config = path.abspath(fs_config) config.update(config_from_file(fs_config)) config.update(config_from_env()) if sleep_seconds is not None: config['sleep_seconds'] = sleep_seconds else: config['sleep_seconds'] = int(config['sleep_seconds']) logging.basicConfig(stream=sys.stdout, level=getattr(logging, config['log_level'].upper())) # read history of previous runs errors = [] fs_history = path.abspath( path.join(path.dirname(fs_config), 'watchdog-history.json')) zbx = None result_code = None if 'zabbix_host' in config: zbx = ZabbixSender(config['zabbix_host']) while True: try: if path.exists(fs_history): previous_history = json.load(open(fs_history, 'r')) else: log.info("Starting with empty history.") previous_history = dict() # fetch submissions from mail server log.debug("Fetching previous submissions from IMAP server") history = fetch_test_submissions(previous_history=previous_history, config=config) # check for failed test submissions max_process_secs = int(config['max_process_secs']) now = datetime.now() for token, timestamp_str in history.items(): timestamp = datetime.utcfromtimestamp((timegm( time.strptime( timestamp_str.split('.')[0] + 'UTC', "%Y-%m-%dT%H:%M:%S%Z")))) age = now - timestamp if age.seconds > max_process_secs and token not in previous_history: errors.append( WatchdogError( subject="Submission '%s' not received" % token, message= u"The submission with token %s which was submitted on %s was not received after %d seconds." % (token, timestamp, max_process_secs))) # perform test submission log.debug("Performing test submissions against {app_url}".format( **config)) token, submission_errors = perform_submission( app_url=config['app_url'], testing_secret=config['testing_secret']) if token: history[token] = datetime.now().isoformat() errors += submission_errors # record updated history file_history = open(fs_history, 'w') file_history.write(json.dumps(history).encode('utf-8')) file_history.close() if len(errors) > 0: log.warning("Errors were found.") from pyramid_mailer import mailer_factory_from_settings from pyramid_mailer.message import Message from urlparse import urlparse mailer = mailer_factory_from_settings(config, prefix='smtp_') hostname = urlparse(config['app_url']).hostname recipients = [ recipient for recipient in config['notify_email'].split() if recipient ] message = Message( subject="[Briefkasten %s] Submission failure" % hostname, sender=config['the_sender'], recipients=recipients, body="\n".join([str(error) for error in errors])) mailer.send_immediately(message, fail_silently=False) result_code = 0 except Exception as exc: log.error(exc) result_code = 1 if zbx is not None: log.info("Pinging Zabbix") metric = ZabbixMetric(config.get('zabbix_sender', 'localhost'), 'briefkasten.watchdog.last_completed_run', result_code) sent = zbx.send([metric]) if sent.failed > 0: log.warning("Failed to ping Zabbix host") if config['sleep_seconds'] > 0: log.info("Sleeping {sleep_seconds} seconds".format(**config)) sleep(config['sleep_seconds']) else: exit(0)
ctime = int(ctime.timestamp())

#Low Level Discovery
first = True
lld = '{ \"data\":['
for i, col in enumerate(header1):
    part1 = col.replace('/', '.').replace(' ', '.')
    part2 = header2[i].replace('/', '.').replace(' ', '.')
    if part2 != "Time":
        if first:
            first = False
        else:
            lld = lld + ','
        lld = lld + '{'
        lld = lld + '"name":"' + col + ' ' + header2[i] + '",'
        lld = lld + '"key":"' + part1 + '-' + part2 + '"'
        lld = lld + '}'
lld = lld + ']}'
packet.append(ZabbixMetric(THIS_HOST, 'ohm.discovery', lld, ctime))

#Send data
for i, col in enumerate(header1):
    part1 = col.replace('/', '.').replace(' ', '.')
    part2 = header2[i].replace('/', '.').replace(' ', '.')
    if part2 != "Time":
        packet.append(
            ZabbixMetric(THIS_HOST, 'ohm.metric[' + part1 + "-" + part2 + ']',
                         last[i], ctime))

result = ZabbixSender(use_config=False, zabbix_server=ZABBIX_SERVER).send(packet)
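# For reference, the discovery string assembled above ends up shaped like the
# following (the header values are placeholders; real ones come from the CSV
# headers the script reads):
#
# { "data":[{"name":"CPU Total Load","key":"CPU.Total-Load"}, ...]}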
class ZabbixSink(BaseThreadedModule):
    """
    Send events to zabbix.

    hostname: Hostname for which the metrics should be stored.
    fields: Event fields to send.
    field_prefix: Prefix to prepend to field names. For e.g. cpu_count field with default lumbermill_ prefix, the Zabbix key is lumbermill_cpu_count.
    timestamp_field: Field to provide timestamp. If not provided, current timestamp is used.
    agent_conf: Path to zabbix_agent configuration file. If set to True defaults to /etc/zabbix/zabbix_agentd.conf.
    server: Address of zabbix server. If port differs from default it can be set by appending it, e.g. 127.0.0.1:10052.
    store_interval_in_secs: sending data to es in x seconds intervals.
    batch_size: sending data to es if event count is above, even if store_interval_in_secs is not reached.
    backlog_size: maximum count of events waiting for transmission. Events above count will be dropped.

    Configuration template:

    - ZabbixSink:
       hostname:                  # <type: string; is: required>
       fields:                    # <type: list; is: required>
       field_prefix:              # <default: "lumbermill_"; type: string; is: optional>
       timestamp_field:           # <default: "timestamp"; type: string; is: optional>
       agent_conf:                # <default: True; type: boolean||string; is: optional>
       server:                    # <default: False; type: boolean||string; is: required if agent_conf is False else optional>
       store_interval_in_secs:    # <default: 10; type: integer; is: optional>
       batch_size:                # <default: 500; type: integer; is: optional>
       backlog_size:              # <default: 500; type: integer; is: optional>
    """

    module_type = "output"
    """Set module type"""

    def configure(self, configuration):
        BaseThreadedModule.configure(self, configuration)
        self.hostname = self.getConfigurationValue("hostname")
        self.fields = self.getConfigurationValue("fields")
        self.field_prefix = self.getConfigurationValue("field_prefix")
        self.timestamp_field = self.getConfigurationValue("timestamp_field")
        self.batch_size = self.getConfigurationValue('batch_size')
        self.backlog_size = self.getConfigurationValue('backlog_size')
        self.agent_conf = self.getConfigurationValue("agent_conf")
        if self.agent_conf:
            if self.agent_conf is True:
                self.agent_conf = "/etc/zabbix/zabbix_agentd.conf"
            if not os.path.isfile(self.agent_conf):
                self.logger.error("%s does not point to an existing file." % self.agent_conf)
                self.lumbermill.shutDown()
            self.zabbix_sender = ZabbixSender(use_config=self.agent_conf)
        else:
            server = self.getConfigurationValue("server")
            port = 10051
            if ":" in server:
                server, port = server.split(":")
            self.zabbix_sender = ZabbixSender(zabbix_server=server, zabbix_port=int(port))
        self.buffer = Buffer(self.getConfigurationValue('batch_size'), self.storeData,
                             self.getConfigurationValue('store_interval_in_secs'),
                             maxsize=self.getConfigurationValue('backlog_size'))

    def getStartMessage(self):
        if self.agent_conf:
            return "Config: %s. Max buffer size: %d" % (self.agent_conf, self.getConfigurationValue('backlog_size'))
        else:
            return "Server: %s. Max buffer size: %d" % (self.getConfigurationValue("server"), self.getConfigurationValue('backlog_size'))

    def initAfterFork(self):
        BaseThreadedModule.initAfterFork(self)
        self.buffer = Buffer(self.getConfigurationValue('batch_size'), self.storeData,
                             self.getConfigurationValue('store_interval_in_secs'),
                             maxsize=self.getConfigurationValue('backlog_size'))

    def handleEvent(self, event):
        self.buffer.append(event)
        yield None

    def storeData(self, events):
        packet = []
        for event in events:
            if self.timestamp_field:
                try:
                    timestamp = event[self.timestamp_field]
                except KeyError:
                    timestamp = None
            hostname = mapDynamicValue(self.hostname, mapping_dict=event, use_strftime=True)
            for field_name in self.fields:
                try:
                    packet.append(ZabbixMetric(hostname, "%s%s" % (self.field_prefix, field_name), event[field_name], timestamp))
                except KeyError:
                    pass
                    #self.logger.warning("Could not send metrics for %s:%s. Field not found." % (hostname, field_name))
        response = self.zabbix_sender.send(packet)
        if response.failed != 0:
            self.logger.warning("%d of %d metrics were not processed correctly." % (response.total - response.processed, response.total))

    def shutDown(self):
        self.buffer.flush()
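# A filled-in configuration sketch for the ZabbixSink module above, following
# the template from its docstring (host and field names are illustrative):
#
# - ZabbixSink:
#    hostname: app-server-01
#    fields: [cpu_count, mem_used]
#    field_prefix: lumbermill_
#    agent_conf: /etc/zabbix/zabbix_agentd.conf
#    store_interval_in_secs: 10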
def pure_volume_monitoring():
    try:
        '''Get the argument from Zabbix'''
        ip = str(sys.argv[2])        #IP of the Pure Storage Array
        token = str(sys.argv[3])     #API Token
        host = str(sys.argv[4])      #Host name (for the sender)
        zabbixIP = str(sys.argv[5])  #Zabbix Proxy or Server IP (for the sender)

        '''Get data'''
        arrayConnect = purestorage.FlashArray(ip, api_token=token, verify_https=False)
        volumeList = arrayConnect.list_volumes()
        metrics = []
        for i in volumeList:
            volume = i["name"]
            volumeMonitoring = arrayConnect.get_volume(volume=volume, action="monitor")
            volumeSpace = arrayConnect.get_volume(volume=volume, space="true")
            volumeInfo = arrayConnect.get_volume(volume=volume)
            arrayValues = volumeMonitoring[0]

            '''Will disable the output to console'''
            FNULL = open(os.devnull, 'w')

            '''Sending data'''
            if "input_per_sec" in arrayValues:
                arrayInputPerSec = str(arrayValues["input_per_sec"])
                m = ZabbixMetric(host, "pure.volume.input.per.second[" + volume + "]", arrayInputPerSec)
                metrics.append(m)
            if "output_per_sec" in arrayValues:
                arrayOutputPerSec = str(arrayValues["output_per_sec"])
                m = ZabbixMetric(host, "pure.volume.output.per.second[" + volume + "]", arrayOutputPerSec)
                metrics.append(m)
            if "reads_per_sec" in arrayValues:
                arrayReadPerSec = str(arrayValues["reads_per_sec"])
                m = ZabbixMetric(host, "pure.volume.read.per.sec[" + volume + "]", arrayReadPerSec)
                metrics.append(m)
            if "san_usec_per_read_op" in arrayValues:
                arraySanUsecPerReadOp = str(arrayValues["san_usec_per_read_op"])
                m = ZabbixMetric(host, "pure.volume.san.usec.per.read[" + volume + "]", arraySanUsecPerReadOp)
                metrics.append(m)
            if "san_usec_per_write_op" in arrayValues:
                arraySanUsecPerWriteOp = str(arrayValues["san_usec_per_write_op"])
                m = ZabbixMetric(host, "pure.volume.san.usec.per.write[" + volume + "]", arraySanUsecPerWriteOp)
                metrics.append(m)
            if "usec_per_read_op" in arrayValues:
                arrayUsecPerReadOp = str(arrayValues["usec_per_read_op"])
                m = ZabbixMetric(host, "pure.volume.usec.per.read[" + volume + "]", arrayUsecPerReadOp)
                metrics.append(m)
            if "usec_per_write_op" in arrayValues:
                arrayUsecPerWriteOp = str(arrayValues["usec_per_write_op"])
                m = ZabbixMetric(host, "pure.volume.usec.per.write[" + volume + "]", arrayUsecPerWriteOp)
                metrics.append(m)
            if "writes_per_sec" in arrayValues:
                arrayWritePerSec = str(arrayValues["writes_per_sec"])
                m = ZabbixMetric(host, "pure.volume.write.per.sec[" + volume + "]", arrayWritePerSec)
                metrics.append(m)
            if "size" in volumeInfo:
                arrayVolumeSize = str(volumeInfo["size"])
                m = ZabbixMetric(host, "pure.volume.size[" + volume + "]", arrayVolumeSize)
                metrics.append(m)
            if "snapshots" in volumeSpace:
                volumeSnapshots = str(volumeSpace["snapshots"])
                m = ZabbixMetric(host, "pure.volume.snapshots.size[" + volume + "]", volumeSnapshots)
                metrics.append(m)
            if "data_reduction" in volumeSpace:
                volumeDataReduction = str(volumeSpace["data_reduction"])
                m = ZabbixMetric(host, "pure.volume.data.reduction[" + volume + "]", volumeDataReduction)
                metrics.append(m)
            if "thin_provisioning" in volumeSpace:
                volumeThinProvisioning = str(volumeSpace["thin_provisioning"])
                m = ZabbixMetric(host, "pure.volume.thin.provisioning[" + volume + "]", volumeThinProvisioning)
                metrics.append(m)
            if "total_reduction" in volumeSpace:
                volumeTotalReduction = str(volumeSpace["total_reduction"])
                m = ZabbixMetric(host, "pure.volume.total.data.reduction[" + volume + "]", volumeTotalReduction)
                metrics.append(m)
            if "volumes" in volumeSpace:
                volumeUsedSpace = str(volumeSpace["volumes"])
                m = ZabbixMetric(host, "pure.volume.used.space[" + volume + "]", volumeUsedSpace)
                metrics.append(m)

        data = ZabbixSender(zabbixIP)
        data.send(metrics)

        '''Send 1 to give a result to Zabbix'''
        print(1)
    except Exception as e:
        '''
        Sending 0 to Zabbix instead of a Python error.
        Like that the items won't be considered as "unsupported"
        '''
        metrics = [ZabbixMetric(host, "pure.volume.monitoring.launcher.error", str(e))]
        data = ZabbixSender(zabbixIP)
        data.send(metrics)
        print(0)
def pure_host_monitoring(): try: '''Get the argument from Zabbix''' ip = str(sys.argv[2]) #IP of the Pure Storage Array token = str(sys.argv[3]) #API Token host = str(sys.argv[4]) #Host name (for the sender) zabbixIP = str(sys.argv[5]) #Zabbix Proxy or Server IP (for the sender) '''Get data''' arrayConnect = purestorage.FlashArray(ip,api_token=token,verify_https=False) hostList = arrayConnect.list_hosts() metrics = [] for i in hostList: hostname = i["name"] hostMonitoring = arrayConnect.get_host(host=hostname,action="monitor") '''Sending data''' if "input_per_sec" in hostMonitoring: arrayInputPerSec = str(hostMonitoring["input_per_sec"]) m = ZabbixMetric(host,"pure.host.input.per.second["+hostname+"]",arrayInputPerSec) metrics.append(m) if "output_per_sec" in hostMonitoring: arrayOutputPerSec = str(hostMonitoring["output_per_sec"]) m = ZabbixMetric(host,"pure.host.output.per.second["+hostname+"]",arrayOutputPerSec) metrics.append(m) if "reads_per_sec" in hostMonitoring: arrayReadPerSec = str(hostMonitoring["reads_per_sec"]) m = ZabbixMetric(host,"pure.host.read.per.sec["+hostname+"]",arrayReadPerSec) metrics.append(m) if "san_usec_per_read_op" in hostMonitoring: arraySanUsecPerReadOp = str(hostMonitoring["san_usec_per_read_op"]) m = ZabbixMetric(host,"pure.host.san.usec.per.read["+hostname+"]",arraySanUsecPerReadOp) metrics.append(m) if "san_usec_per_write_op" in hostMonitoring: arraySanUsecPerWriteOp = str(hostMonitoring["san_usec_per_write_op"]) m = ZabbixMetric(host,"pure.host.san.usec.per.write["+hostname+"]",arraySanUsecPerWriteOp) metrics.append(m) if "usec_per_read_op" in hostMonitoring: arrayUsecPerReadOp = str(hostMonitoring["usec_per_read_op"]) m = ZabbixMetric(host,"pure.host.usec.per.read["+hostname+"]",arrayUsecPerReadOp) metrics.append(m) if "usec_per_write_op" in hostMonitoring: arrayUsecPerWriteOp = str(hostMonitoring["usec_per_write_op"]) m = ZabbixMetric(host,"pure.host.usec.per.write["+hostname+"]",arrayUsecPerWriteOp) metrics.append(m) if "writes_per_sec" in hostMonitoring: arrayWritePerSec = str(hostMonitoring["writes_per_sec"]) m = ZabbixMetric(host,"pure.host.write.per.sec["+hostname+"]",arrayWritePerSec) metrics.append(m) data = ZabbixSender(zabbixIP) data.send(metrics) '''Send 1 to give a result to Zabbix''' print(1) except Exception as e: ''' Sending 0 to Zabbix instead of a Python error. Like that the items won't be considered as "unsupported" ''' metrics = [ZabbixMetric(host,"pure.host.monitoring.launcher.error",str(e))] data = ZabbixSender(zabbixIP) data.send(metrics) print(0)
def test_repr(self):
    zs = ZabbixSender()
    self.assertEqual(zs.__repr__(), json.dumps(zs.__dict__))
import sys
import logging

logging.basicConfig(stream=sys.stdout, level=logging.INFO)

from pyzabbix import ZabbixMetric, ZabbixSender

# argument check: the script expects zabbixServer, hostId, key and value
if len(sys.argv) < 5:
    print "USAGE: zabbixServer hostId key value"
    print "EXAMPLE: 127.0.0.1 myhost mystr1 testvalue"
    sys.exit(1)

# simple parse for arguments
zserver = sys.argv[1]
if zserver.lower().startswith("http"):
    print "Do not prefix the zabbix server name with 'http' or 'https', just specify the hostname or IP"
    sys.exit(1)
hostId = sys.argv[2]
key = sys.argv[3]
value = sys.argv[4]
port = 10051

# Send metrics to zabbix trapper
packet = [
    ZabbixMetric(hostId, key, value)
    # multiple metrics can be sent in same call for efficiency
    #,ZabbixMetric(hostId, 'anotherkey', 'anothervalue')
]
result = ZabbixSender(zserver, port, use_config=None).send(packet)
print result
def addZabbixReceiver(self, ZabbixHostAddress, ZabbixHostPort=10051):
    self.ZabbixHost = ZabbixSender(ZabbixHostAddress, ZabbixHostPort)
] r = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE) out, err = r.communicate() res = out + err #print("DEBUG: STR: " + arg) #print("DEBUG: RES: " + res) #print("DEBUG: ERR: " + err + str(len(err))) state = 0 if len(err) > 0: packet = [ ZabbixMetric(zbhost, 'mongodb_state', state), ZabbixMetric(zbhost, 'mongodb_errstr', err) ] result = ZabbixSender(zabbix_port=ZBPORT, zabbix_server=ZBSERVER).send(packet) print(err) sys.exit(1) res = res.rstrip() res = res.replace('*', '') res = re.sub("^ +", "", res) arr = re.split(" +", res) def str_to_int(s): m = re.match('(\d+)(\[a-z]|[A-Z])', s) r = re.match('(\d+).(\d+)(\[a-z]|[A-Z])', s) if r: m = r if m: i = int(m.group(1))
tikers_cripto = [] for t in tikers['nacionais']: for k in t.keys(): tikers_nacionais.append(k) for t in tikers['internacionais']: for k in t.keys(): tikers_internacionais.append(k) for t in tikers['criptomoedas']: for k in t.keys(): tikers_cripto.append(k) try: zbx = ZabbixSender(zabbix_server=IP, zabbix_port=10051, use_config=None) except: print('Problemas ao se conectar com o servidor zabbix') while True: try: metrics = [] for t in tikers_internacionais: df_internacionais[t] = wb.DataReader(t, data_source='yahoo', start=dia)['Adj Close'] for t in tikers_nacionais: df_nacionais[t] = wb.DataReader(t, data_source='yahoo',
def test_load_from_config(self):
    folder = os.path.dirname(__file__)
    filename = os.path.join(folder, 'data/zabbix_agentd.conf')
    zs = ZabbixSender()
    result = zs._load_from_config(config_file=filename)
    self.assertEqual(result, [('192.168.1.2', 10051)])
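# The data/zabbix_agentd.conf fixture referenced above presumably contains a
# ServerActive directive matching the expected result, along the lines of:
#
#   ServerActive=192.168.1.2
#
# (10051 being the default port when no :port suffix is given).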
def sender():
    zbx = ZabbixSender('127.0.0.1')
    metrics = [ZabbixMetric('my.local.sender', 'trap', 1)]
    print(zbx.send(metrics))
import sys

from pyzabbix import ZabbixMetric, ZabbixSender, ZabbixResponse

# argument check: the script expects zabbixServer, hostId, key and value
if len(sys.argv) < 5:
    print("USAGE: zabbixServer hostId key value")
    print("EXAMPLE: 127.0.0.1 myhost mystr1 testvalue")
    sys.exit(1)

# simple parse for arguments
zserver = sys.argv[1]
if zserver.lower().startswith("http"):
    print(
        "Do not prefix the zabbix server name with 'http' or 'https', just specify the hostname or IP"
    )
    sys.exit(1)
hostId = sys.argv[2]
key = sys.argv[3]
value = sys.argv[4]
port = 10051

# Send metrics to zabbix trapper
packet = [
    ZabbixMetric(hostId, key, value)
    # multiple metrics can be sent in same call for efficiency
    # ,ZabbixMetric(hostId, 'anotherkey', 'anothervalue')
]
ZabbixResponse = ZabbixSender(zserver, port, use_config=None).send(packet)
print(ZabbixResponse)
# sys.exit(2)
SMCLI = "/opt/IBM_DS/client/SMcli"
CONN = ipa + " " + ipb
cmd = SMCLI + " " + CONN + " -S -c \"set session performanceMonitorInterval=3 performanceMonitorIterations=1;show allLogicalDrives performanceStats;\""
#print cmd
proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, shell=True)
(out, err) = proc.communicate()
strings = out.split("\n", 7)[7]
#print strings

packet = []
output = []
for str in strings.splitlines():
    arr = str.split(',')
    output.append({"{#VALUE}": arr[0].replace("\"", "")})
    packet.append(ZabbixMetric(host, 'total.ios[' + arr[0].replace("\"", "") + ']', arr[1].replace("\"", "")))
    packet.append(ZabbixMetric(host, 'read[' + arr[0].replace("\"", "") + ']', arr[2].replace("\"", "")))
    packet.append(ZabbixMetric(host, 'read.cache.hit[' + arr[0].replace("\"", "") + ']', arr[3].replace("\"", "")))
    packet.append(ZabbixMetric(host, 'write.cache.hit[' + arr[0].replace("\"", "") + ']', arr[4].replace("\"", "")))
    packet.append(ZabbixMetric(host, 'ssd.cache.hit[' + arr[0].replace("\"", "") + ']', arr[5].replace("\"", "")))
    packet.append(ZabbixMetric(host, 'current.MBs[' + arr[0].replace("\"", "") + ']', arr[6].replace("\"", "")))
    packet.append(ZabbixMetric(host, 'max.MBs[' + arr[0].replace("\"", "") + ']', arr[7].replace("\"", "")))
    packet.append(ZabbixMetric(host, 'current.ios[' + arr[0].replace("\"", "") + ']', arr[8].replace("\"", "")))
    packet.append(ZabbixMetric(host, 'max.ios[' + arr[0].replace("\"", "") + ']', arr[9].replace("\"", "")))
    if arr[0].replace("\"", "") == "STORAGE SUBSYSTEM TOTALS":
        break
#print packet

ZabbixSender(zabbix_server='192.168.10.45', zabbix_port=10051).send(packet)
print '{"data":'
print json.dumps(output)
print '}'
ufos_url = "http://" + url.netloc + "/sufdclient/index.zul" else: ufos_url = "http://" + url.netloc try: driver.get(ufos_url) start_time = time.time() driver.find_element(By.ID, "user").click() driver.find_element(By.ID, "user").send_keys(args.login) driver.find_element(By.ID, "psw").click() driver.find_element(By.ID, "psw").send_keys(args.password) driver.find_element(By.ID, "okButton").click() wait.until(expected_conditions.element_to_be_clickable((By.XPATH, "//span[contains(.,'Настройки')]"))) login_time = time.time() - start_time driver.find_element(By.XPATH, "//span[contains(.,'Настройки')]").click() driver.find_element(By.XPATH, "//span[contains(.,'Выйти')]").click() exit_time = time.time() - start_time driver.quit() host = ((url.netloc).split(":")[0]).split(".")[0] total_time = int(login_time + exit_time) key = "login_test_" + str(url.port) zabbix_sender = ZabbixSender(zabbix_server='host.ru') metrics = [] m = ZabbixMetric(host, key, total_time) metrics.append(m) zabbix_sender.send(metrics) print (host, url.port, total_time) except Exception as error: print (url.netloc) print (error)
def radio(self): contador1 = 0 RSSIdS1 = RSSIdR1 = RSSIuB1 = RSSIuR1 = 0 # Configura a serial n_serial = self.conf['serial'] ser = serial.Serial( n_serial, 9600, timeout=0.5, parity=serial.PARITY_NONE) # seta valores da serial # Identificação da base e Número de sensores ID_base = 0 #byte 10 (esta informação é adicionada pela base, está aqui por questão didática) ID_repetidor = 20 #byte 8 Num_Sensores = len(self.conf['sensor_name']) ID_destino_volta = 0 # Cria o vetor Pacote PacoteTX = {} PacoteRX = {} # Intervalo entre as medições TEMPO1 = 1 #Cria o vetor para salvar os valores das potências listaPotDesviod1 = {} listaPotDesviou1 = {} # Cria Pacote de 52 bytes com valor zero em todas as posições for i in range(52): # faz um array com 52 bytes PacoteTX[i] = 0 PacoteRX[i] = 0 # Loop Infinito while True: try: contador_tot1 = 0 contador_pot1 = 0 potmediAD2 = 0.0 potacumulAD2 = 0.0 potmeddbd1 = 0.0 contador_err1 = 0 potmediau1 = 0.0 potacumulau1 = 0.0 potmeddbu1 = 0.0 PER1 = 0 AcumDPd1 = 0 AcumDPu1 = 0 AcumVAD2 = 0 AcumVau1 = 0 MedDPd1 = 0 MedDPu1 = 0 DPd1 = 0 DPu1 = 0 PotMaxd1 = -200 PotMind1 = 10 PotMaxu1 = -200 PotMinu1 = 10 # Imprime na tela o Menu de Opções print '[*] Opções do Programa:' print '1 - Mede Temperatura, Umidade e Luminosidade' print 's - Para sair' print # Leitura da opção escolhida no menu Opcao = raw_input('Comando:') if Opcao == "1": # Entra com a opção num_medidas = raw_input('Entre com o número de medidas = ') w = int(num_medidas) for j in range( 0, w ): #Inicializa uma lista para gravar as potências e calcular o desvio padrão listaPotDesviod1[j] = 0 listaPotDesviou1[j] = 0 Log = strftime("Coleta_de_dados_%Y_%m_%d_%H-%M-%S.txt") print "Arquivo de log: %s" % Log S = open(Log, 'w') for s in range(Num_Sensores): ID_destino_ida1 = s for j in range(w): # Limpa o buffer da serial ser.flushInput() #Contador de PacoteTX PacoteTX[13] = contador1 + 1 PacoteTX[37] = 1 #Liga LDR # Coloca no pacote o ID_sensor e ID_base PacoteTX[8] = int(ID_repetidor) PacoteTX[9] = int(ID_destino_ida1) PacoteTX[10] = int(ID_base) PacoteTX[11] = int(ID_destino_volta) # TX pacote - envia pacote para a base transmitir for i in range(52): ser.write(chr(PacoteTX[i])) # Tempo de espera para que receba a resposta do sensor time.sleep(0.1) # RX pacote - recebe o pacote enviado pelo sensor PacoteRX = ser.read( 52 ) # Faz a leitura de 52 bytes do buffer que recebe da serial pela COM # Checa se recebeu 52 bytes if len(PacoteRX) == 52: rssidS1 = ord( PacoteRX[0]) # RSSI_DownLink_Sensor rssidR1 = ord( PacoteRX[1]) # RSSI_DownLink_Repetidor rssiuR1 = ord( PacoteRX[3]) # RSSI_UpLink_Repetidor rssiuB1 = ord(PacoteRX[2]) # RSSI_UpLink_Base # RSSI Downlink_Sensor - potência recebida pelo sensor if rssidS1 > 128: RSSIdS1 = ((rssidS1 - 256) / 2.0) - 74 else: RSSIdS1 = (rssidS1 / 2.0) - 74 # RSSI Downlink_Repetidor - potência recebida pelo repetidor da base if rssidR1 > 128: RSSIdR1 = ((rssidR1 - 256) / 2.0) - 81 else: RSSIdR1 = (rssidR1 / 2.0) - 81 # RSSI Downlink_Sensor - potência recebida pelo sensor if rssiuR1 > 128: RSSIuR1 = ((rssiuR1 - 256) / 2.0) - 81 else: RSSIuR1 = (rssiuR1 / 2.0) - 81 # RSSI Uplink_Base - potência recebida pela base if rssiuB1 > 128: RSSIuB1 = ((rssiuB1 - 256) / 2.0) - 81 else: RSSIuB1 = (rssiuB1 / 2.0) - 81 # Leitura do AD0 ad0t = ord( PacoteRX[16] ) # tipo de sensor - no caso está medindo temperatura ad0h = ord(PacoteRX[17]) # alto ad0l = ord(PacoteRX[18]) # baixo AD1 = float(ad0h * 256 + ad0l) / 100 # Leitura do AD2 AD2t = ord( PacoteRX[19] ) # tipo de sensor - no caso está medindo umidade 
AD2h = ord(PacoteRX[20]) # alto AD2l = ord(PacoteRX[21]) # baixo AD2 = float(AD2h * 256 + AD2l) / 100 # Leitura do AD3 AD3t = ord( PacoteRX[22] ) # tipo de sensor - no caso está medindo luminosidade AD3h = ord(PacoteRX[23]) # alto AD3l = ord(PacoteRX[24]) # baixo AD3 = float(AD3h * 256 + AD3l) / 100 if RSSIdS1 > PotMaxd1: PotMaxd1 = RSSIdS1 if RSSIdS1 < PotMind1: PotMind1 = RSSIdS1 if RSSIuB1 > PotMaxu1: PotMaxu1 = RSSIuB1 if RSSIuB1 < PotMinu1: PotMinu1 = RSSIuB1 listaPotDesviod1[ contador_pot1] = RSSIdS1 # Grava a potência de downlink para cálculo do desvio padrão listaPotDesviou1[ contador_pot1] = RSSIuB1 # Grava a potência de uplink para cálculo do desvio padrão contador_pot1 = contador_pot1 + 1 # Incrementa o contador utilizado para a média de potência e para o desvio padrão potmwd1 = pow( 10, (RSSIdS1 / 10) ) # Converte a potência de downlink em dBm para mW. potacumulAD2 = potacumulAD2 + potmwd1 # Soma a potência em mW em um acumulador potmwu1 = pow( 10, (RSSIuB1 / 10) ) # Converte a potência de uplink em dBm para mW potacumulau1 = potacumulau1 + potmwu1 print 'SENSOR:', s + 1, 'LEITURA:', j + 1 print time.asctime( ), 'Temperatura', AD1, 'ºC', 'Umidade', AD2, '%', 'Luminosidade', AD3, 'Lúmen', ' RSSIdS', RSSIdS1, 'dBm', ' RSSIdR', RSSIdR1, 'dBm', ' RSSIuR', RSSIuR1, 'dBm', ' RSSIuB', RSSIuB1, 'dBm' print >> S, 'SENSOR:', s + 1, 'LEITURA:', j + 1 print >> S, time.asctime( ), j, 'Temperatura', AD1, 'ºC', 'Umidade', AD2, '%', 'Luminosidade', AD3, 'Lúmen', ' RSSIdS', RSSIdS1, 'dBm', ' RSSIdR', RSSIdR1, 'dBm', ' RSSIuR', RSSIuR1, 'dBm', ' RSSIuB', RSSIuB1, 'dBm' time.sleep(int(TEMPO1)) # Define as variáveis do Script que alimentarão os itens no Zabbix L1 = [ AD1, AD2, AD3, RSSIdS1, RSSIdR1, RSSIuR1, RSSIuB1 ] # Faz um loop para enviar cada valor de métrica para cada item no Zabbix if self.conf['zabbix_enable']: try: x = 0 while x < len( self.conf['zabbix_keys']): # Envia valores para o Zabbix z = self.conf['sensor_name'][s] metrics = [] m = ZabbixMetric( z, self.conf['zabbix_keys'][x], L1[x]) metrics.append(m) config_file = self.conf[ 'agentd_config'] if self.conf[ 'agentd_config'] else None zbx = ZabbixSender( zabbix_server=self. conf['server_name'], zabbix_port=self. 
conf['zabbix_port'], use_config=config_file) zbx.send(metrics) x += 1 except Exception as error: print 'Problemas de comunicação com o Servidor Zabbix:', error print >> S, time.asctime( ), 'Problemas de comunicação com o Servidor Zabbix:', error else: contador_err1 = contador_err1 + 1 print 'Perda de pacote' time.sleep(int(TEMPO1)) contador_tot1 = contador_tot1 + 1 # Relatório do Sensor if contador_pot1 == 0: contador_pot1 = 1 for l in range(0, contador_pot1): AcumVAD2 = AcumVAD2 + listaPotDesviod1[ l] # Acumula o valor da lista para calcular a média AcumVau1 = AcumVau1 + listaPotDesviou1[ l] # Acumula o valor da lista para calcular a média MedDPd1 = float(AcumVAD2) / float(contador_pot1) MedDPu1 = float(AcumVau1) / float(contador_pot1) for m in range(0, contador_pot1): AcumDPd1 = AcumDPd1 + pow( (listaPotDesviod1[m] - MedDPd1), 2) # Acumula o valor da variancia AcumDPu1 = AcumDPu1 + pow( (listaPotDesviou1[m] - MedDPu1), 2) # Acumula o valor da variancia DPd1 = float(AcumDPd1) / float( contador_pot1) # Termina o calculo da variancia DPu1 = float(AcumDPu1) / float( contador_pot1) # Termina o calculo da variancia potmediAD2 = potacumulAD2 / contador_pot1 if potmediAD2 == 0: potmediAD2 = 0 else: potmeddbd1 = 10 * math.log10(potmediAD2) print print 'RELATÓRIO SENSOR:', s + 1 print 'A Potência Média de Downlink em dBm foi:', potmeddbd1, ' dBm' print 'A Potência Máxima de Downlink em dBm foi:', PotMaxd1, ' dBm' print 'A Potência Mínima de Downlink em dBm foi:', PotMind1, ' dBm' print 'O Desvio Padrão do sinal de Downlink foi:', DPd1 print >> S, time.asctime() print >> S, time.asctime(), 'RELATÓRIO SENSOR:', s + 1 print >> S, time.asctime( ), ' A Potência Média de Downlink em dBm foi:', potmeddbd1, ' dBm' print >> S, time.asctime( ), 'A Potência Máxima de Downlink em dBm foi:', PotMaxd1, ' dBm' print >> S, time.asctime( ), 'A Potência Mínima de Downlink em dBm foi:', PotMind1, ' dBm' print >> S, time.asctime( ), 'O Desvio Padrão do sinal de Downlink foi:', DPd1 potmediau1 = potacumulau1 / contador_pot1 if potmediau1 == 0: potmediau1 = 0 else: potmeddbu1 = 10 * math.log10(potmediau1) print 'A Potência Média de Uplink em dBm foi:', potmeddbu1, ' dBm' print 'A Potência Máxima de Uplink em dBm foi:', PotMaxu1, ' dBm' print 'A Potência Mínima de Uplink em dBm foi:', PotMinu1, ' dBm' print 'O Desvio Padrão do sinal de Uplink foi:', DPu1 print >> S, time.asctime( ), ' A Potência Média de Uplink em dBm foi:', potmeddbu1, ' dBm' print >> S, time.asctime( ), 'A Potência Máxima de Uplink em dBm foi:', PotMaxu1, ' dBm' print >> S, time.asctime( ), 'A Potência Mínima de Uplink em dBm foi:', PotMinu1, ' dBm' print >> S, time.asctime( ), 'O Desvio Padrão do sinal de Uplink foi:', DPu1 PER1 = (float(contador_err1) / float(contador_tot1)) * 100 print 'A PER foi de:', float(PER1), '%' print >> S, time.asctime(), 'A PER foi de:', float( PER1), '%' print print >> S, time.asctime() # Define as variáveis do Script que alimentarão os itens no Zabbix L2 = [ potmeddbu1, PotMaxu1, PotMinu1, DPu1, potmeddbd1, PotMaxd1, PotMind1, DPd1 ] # Faz um loop para enviar cada valor de métrica para cada item no Zabbix if self.conf['zabbix_enable']: try: x = 0 while x < len(self.conf['zabbix_keys_rel']): # Envia valores para o Zabbix z = self.conf['sensor_name'][s] metrics = [] m = ZabbixMetric( z, self.conf['zabbix_keys_rel'][x], L2[x]) metrics.append(m) config_file = self.conf[ 'agentd_config'] if self.conf[ 'agentd_config'] else None zbx = ZabbixSender( zabbix_server=self.conf['server_name'], zabbix_port=self.conf['zabbix_port'], 
use_config=config_file) zbx.send(metrics) x += 1 except Exception as error: print 'Problemas de comunicação com o Servidor Zabbix:', error print >> S, time.asctime( ), 'Problemas de comunicação com o Servidor Zabbix:', error S.close() else: # Opção de saída ser.close() # Fecha a porta COM print print '[*] Fim da Execução' # Escreve na tela break ser.flushInput() except KeyboardInterrupt: S.close() ser.close() break