Example #1
0
    def query(self, **kwargs):
        """Poll a Mesos/Marathon stats endpoint and report configured metrics.

        Connection settings (user, password, domain, port, uri) come from
        the plugin configuration in kwargs["config"]. Does nothing when
        neither the Mesos master nor the Marathon process group is found.
        """
        cfg = kwargs["config"]
        user = cfg["user"]
        password = cfg["password"]
        domain = cfg["domain"]
        port = cfg["port"]
        uri = cfg["uri"]

        # Prefer the Mesos master process group; fall back to Marathon.
        group = None
        for candidate in ('mesos-master', 'mesosphere.marathon.Main'):
            try:
                group = self.find_single_process_group(pgi_name(candidate))
                break
            except ConfigException:
                continue

        if group is None:
            return

        group_id = group.group_instance_id
        stats_url = ("http://" + domain + ":" + port + uri)
        response = requests.get(stats_url, auth=(user, password))
        stats = json.loads(response.content.decode())

        for metric in self.metrics:
            stat_key = metric["source"]["stat"]
            print(stat_key)  # debug trace of which stat is being read
            metric_key = metric["timeseries"]["key"]
            self.results_builder.absolute(key=metric_key,
                                          value=stats[stat_key],
                                          entity_id=group_id)
Example #2
0
    def query(self, **kwargs):
        """Report days remaining until dynatrace.com's TLS certificate expires."""
        pgi = self.find_single_process_group(pgi_name('Windows System'))
        pgi_id = pgi.group_instance_id

        host = "dynatrace.com"

        # Format of the 'notAfter' field in the peer certificate.
        not_after_fmt = r'%b %d %H:%M:%S %Y %Z'

        # Wrap a plain TCP socket with TLS so the peer certificate is exposed.
        tls_context = ssl.create_default_context()
        conn = tls_context.wrap_socket(
            socket.socket(socket.AF_INET),
            server_hostname=host,
        )

        # 3 second timeout because Lambda has runtime limitations
        conn.settimeout(3.0)

        conn.connect((host, 443))
        cert = conn.getpeercert()

        # parse the string from the certificate into a Python datetime object
        expires_at = datetime.datetime.strptime(cert['notAfter'], not_after_fmt)

        remaining = expires_at - datetime.datetime.utcnow()
        days_left = remaining.total_seconds() / 60 / 60 / 24

        self.results_builder.absolute(key='days_remaining',
                                      value=days_left,
                                      entity_id=pgi_id)
Example #3
0
 def query(self, **kwargs):
     """Report a constant gauge ('random') and a constant counter delta ('counter')."""
     group = self.find_single_process_group(pgi_name('springmin.Application'))
     instance_id = group.group_instance_id
     self.results_builder.absolute(key='random', value=1.0,
                                   entity_id=instance_id)
     self.results_builder.relative(key='counter', value=1.0,
                                   entity_id=instance_id)
Example #4
0
    def query(self, **kwargs):
        """Fetch stats from the demo app's local endpoint and report both metrics."""
        group = self.find_single_process_group(pgi_name('oneagent_sdk.demo_app'))
        group_id = group.group_instance_id

        payload = requests.get("http://localhost:8769").content.decode()
        stats = json.loads(payload)

        self.results_builder.absolute(key='random',
                                      value=stats['random'],
                                      entity_id=group_id)
        self.results_builder.relative(key='counter',
                                      value=stats['counter'],
                                      entity_id=group_id)
Example #5
0
    def query(self, **kwargs):
        # Template snippet: the <hostname>/<queuename> placeholders must be
        # filled in before this is valid Python — it will not parse as-is.

        pgi = self.find_single_process_group(pgi_name('Windows System'))
        pgi_id = pgi.group_instance_id

        # Sample the MSMQ "Messages in Queue" performance counter for two
        # queues (delay is in milliseconds; fmts='double' requests a float).
        queue1 = winstats.get_perf_data(r'\MSMQ Queue(<hostname>\private$\<queuename>)\Messages in Queue', fmts='double', delay=100)
        queue2 = winstats.get_perf_data(r'\MSMQ Queue(<hostname>\private$\<queuename>)\Messages in Queue', fmts='double', delay=100)

        # NOTE(review): value=<queuename>[0] is placeholder syntax — presumably
        # queue1[0] and queue2[0] were intended; confirm when instantiating.
        self.results_builder.relative(key='<queuename>', value=<queuename>[0], entity_id=pgi_id)
        self.results_builder.relative(key='<queuename>', value=<queuename>[0], entity_id=pgi_id)
Example #6
0
 def query(self, **kwargs):
     """Fetch the current BTC/USD rate from CoinDesk and report it.

     Reports the rate as an absolute metric keyed "BTC" against the
     'puppet' process group instance.
     """
     pgi = self.find_single_process_group(pgi_name('puppet'))
     pgi_id = pgi.group_instance_id

     stats_url = "http://api.coindesk.com/v1/bpi/currentprice.json"
     stats = json.loads(requests.get(stats_url).content.decode())

     btc_value = stats['bpi']['USD']['rate_float']
     print(btc_value)  # debug trace of the fetched rate
     # Reuse the value already extracted above instead of re-indexing stats.
     self.results_builder.absolute(key="BTC",
                                   value=btc_value,
                                   entity_id=pgi_id)
    def query(self, **kwargs):
        """Read DirectAccess statistics from a JSON file and report them.

        Raises:
            ConfigException: if the file cannot be opened, or its contents
                cannot be parsed as JSON.
        """
        pgi = self.find_single_process_group(pgi_name('DirectAccess'))
        pgi_id = pgi.group_instance_id

        json_file_path = "D:\\Software\\Scripts\\DirectAccessStats.JSON"
        stats = {}

        try:
            with open(json_file_path, encoding='utf-8') as json_file:
                # Strip a UTF-8 BOM that Windows tooling often prepends.
                clean_file = json_file.read().replace('\ufeff', '')
                try:
                    stats = json.loads(clean_file)
                except ValueError as ex:
                    raise ConfigException('Unable to parse "%s" as JSON' %
                                          json_file_path) from ex
        except IOError as ex:
            raise ConfigException('Could not open file "%s"' %
                                  json_file_path) from ex

        # Metric key -> field name in the JSON document. The insertion order
        # matches the original reporting order.
        key_to_stat = {
            'total_connections': 'TotalConnections',
            'total_DA_connections': 'TotalDAConnections',
            'total_vpn_connections': 'TotalVpnConnections',
            'total_unique_users': 'TotalUniqueUsers',
            'max_concurrent_connections': 'MaxConcurrentConnections',
            'total_cumulative_connections': 'TotalCumulativeConnections',
            'total_bytes_in': 'TotalBytesIn',
            'total_bytes_out': 'TotalBytesOut',
            'total_bytes_in_out': 'TotalBytesInOut',
        }
        for metric_key, stat_name in key_to_stat.items():
            self.results_builder.absolute(key=metric_key,
                                          value=stats[stat_name],
                                          entity_id=pgi_id)
    def query(self, **kwargs):
        """Scan a log file and report each configured metric's matched values.

        Config keys:
            filename: path of the file to scan.
            pginame:  process group name to attach results to.

        Metrics whose pattern has no capture group are reported as 0 when no
        line matches them, so they always produce a data point.

        Raises:
            ConfigException: if the file cannot be read or any metric
                processing fails.
        """
        config = kwargs["config"]
        filename = config["filename"]
        pginame = config["pginame"]

        # Metrics with no capture group must still be reported (as 0) when
        # they never match; track which ones have not been seen yet.
        unaccessed = [m.name for m in metrics if m.pattern.groups == 0]

        pgi = self.find_single_process_group(pgi_name(pginame))
        pgi_id = pgi.group_instance_id

        try:
            # 'with' guarantees the file is closed on every path, replacing
            # the old "if 'inf' in locals()" cleanup hack in finally.
            with open(filename, 'r') as inf:
                for line in inf:
                    for metric in metrics:
                        matched, value = metric.match(line)
                        if not matched:
                            continue
                        self.results_builder.absolute(key=metric.name,
                                                      value=value,
                                                      entity_id=pgi_id)
                        if metric.name in unaccessed:
                            unaccessed.remove(metric.name)
        except Exception as ex:
            raise ConfigException(
                'Caught exception while trying to open file "%s": %s' %
                (filename, ex)) from ex

        # report a value of 0 for those metrics that were not found
        for n in unaccessed:
            self.results_builder.absolute(key=n, value=0, entity_id=pgi_id)
    def query(self, **kwargs):
        """Query the OpenShift stats endpoint and report configured metrics.

        Connection settings (user, password, domain, port, uri) come from
        the plugin configuration in kwargs["config"].
        """
        cfg = kwargs["config"]
        user = cfg["user"]
        password = cfg["password"]
        domain = cfg["domain"]
        port = cfg["port"]
        uri = cfg["uri"]

        group = self.find_single_process_group(pgi_name('openshift'))
        group_id = group.group_instance_id

        endpoint = ("http://" + domain + ":" + port + uri)
        response = requests.get(endpoint, auth=(user, password))
        stats = json.loads(response.content.decode())

        for metric in self.metrics:
            stat_key = metric["source"]["stat"]
            print(stat_key)  # debug trace of which stat is being read
            metric_key = metric["timeseries"]["key"]
            self.results_builder.absolute(key=metric_key,
                                          value=stats[stat_key],
                                          entity_id=group_id)
Example #10
0
	def query(self, **kwargs):
		"""Collect OpenDJ replication metrics over SSH via ldapsearch.

		Connects to the OpenDJ host with paramiko (private-key or
		host-key/password auth), runs ldapsearch against
		cn=Replication,cn=monitor, and reports every whitelisted
		replication attribute as an absolute metric.
		"""
		# Defaults; any of these can be overridden via the plugin config.
		processName = "org.opends.server.core.DirectoryServer"
		hostName = "localhost"
		userName = "******"
		userPassword = "******"
		pathToKey = ""
		hostKey = "null"
		key = ""
		pathToLDAPSearch = "/opt/opendj/bin"
		ldapPort = "1389"
		bindDN = "cn=Directory Manager"
		bindPassword = ""
		baseDN = "cn=Replication,cn=monitor"

		config = kwargs['config']
		# Pull overrides from the plugin configuration.
		try:
			# BUGFIX: test the config *key names* ('processName',
			# 'bindPassword'); the old code tested the default values as
			# keys, so bindPassword could never be read from config and the
			# ldapsearch branch below was unreachable.
			if 'processName' in config:
				processName = config['processName']
			if 'userName' in config:
				userName = config['userName']
			if 'userPassword' in config:
				userPassword = config['userPassword']
			if 'pathToKey' in config:
				pathToKey = config['pathToKey']
			if 'hostKey' in config:
				hostKey = config['hostKey']
			if 'pathToLDAPSearch' in config:
				pathToLDAPSearch = config['pathToLDAPSearch']
			if 'ldapPort' in config:
				ldapPort = config['ldapPort']
			if 'bindPassword' in config:
				bindPassword = config['bindPassword']
			self.logger.info('--- List of kwargs --')
			for item in config.values():
				self.logger.info(item)
		except Exception:
			self.logger.info('There was an error with the parameters.')

		# Find Dynatrace pgi_id from oneAgent monitoring of OpenDJ
		pgi = self.find_single_process_group(pgi_name(processName))
		pgi_id = pgi.group_instance_id

		# these are the metrics the values will be captured for
		key_mapper = ["lost-connections", "received-updates", "sent-updates", "replayed-updates", "pending-updates", "replayed-updates-ok", "resolved-modify-conflicts", "resolved-naming-conflicts", "unresolved-naming-conflicts", "missing-changes", "approximate-delay"]

		# connect to host
		client = paramiko.SSHClient()
		try:
			if userName != "null" and hostName != "null":
				if pathToKey != "":
					# Using private keys to connect to host
					key = paramiko.RSAKey.from_private_key_file(pathToKey)
					client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
					client.connect(hostname=hostName, username=userName, pkey=key)
				elif userPassword != "null" and hostKey != "null":
					# Using host key verification & password auth to connect
					key = paramiko.RSAKey(data=base64.b64decode(hostKey))  # base64 RSA host key for verification
					client.get_host_keys().add(hostName, 'ssh-rsa', key)
					client.connect(hostName, username=userName, password=userPassword, timeout=30)
			else:
				self.logger.info('No User or Host provided - Could not Connect')
		except Exception:
			self.logger.info('Generic Could not Connect to Host')

		# Build the remote command only when we have enough to bind with;
		# linuxCommand stays None otherwise (the old code hit a NameError).
		linuxCommand = None
		if ldapPort != "" and bindPassword != "":
			linuxCommand = 'cd ' + pathToLDAPSearch + ' ; ./ldapsearch --port ' + ldapPort + ' --bindDN "' + bindDN + '" --bindPassword ' + bindPassword + ' --baseDN "' + baseDN + '" --searchScope sub "(objectClass=*)" \* + lost-connections received-updates sent-updates replayed-updates pending-updates replayed-updates-ok resolved-modify-conflicts resolved-naming-conflicts unresolved-naming-conflicts missing-changes approximate-delay'
		else:
			self.logger.info('Issue with LDAP Port or Bind Password')

		# Default to an empty result so the loop below is safe when the
		# command could not be built or failed to run.
		stdout = []
		try:
			# first move to correct directory then run ldapsearch command and pipe all data to stdin, stdout, & stderr
			if linuxCommand is not None:
				stdin, stdout, stderr = client.exec_command(linuxCommand)
		except Exception:
			self.logger.info('Issue with running linux ldapsearch command')

		# for each line check to see if it contains a wanted variable
		for line in stdout:
			strArray = line.split(":")
			measureValue = strArray[1].strip(' ')

			if strArray[0] in key_mapper:
				self.logger.info(strArray[0] + ' : ' + strArray[1])
				self.results_builder.absolute(key=strArray[0], value=measureValue, entity_id=pgi_id)  # send measure

		# Close exactly once; the old try/except/finally closed the client a
		# second time in finally, which re-raised if the first close failed.
		try:
			client.close()
		except Exception:
			self.logger.info("Issue closing client connection")
 def query(self, **kwargs):
     """Fetch a JSON stat from the local Apache endpoint and report it."""
     group = self.find_single_process_group(pgi_name('Apache Web Server httpd'))
     group_id = group.group_instance_id
     payload = requests.get("http://localhost").content.decode()
     stats = json.loads(payload)
     self.results_builder.absolute(key='randomNumber',
                                   value=stats['randomNumber'],
                                   entity_id=group_id)