    def __init__(self):
        self.data = {}
        self.collectorFunctions = [
            self.getHttpConnectionsMetrics,  # stub_status metrics
            self.getAccessLogs,  # metrics derived from the access log
            self.getWorkers  # worker process metrics
        ]  # the functions used to extract data

        self.meta = {
            'stubStatusUrl': 'http://127.0.0.1/nginx_status',
            'accessLogPath': '/var/log/nginx/access.log',
            'storePath': './store.pkl'
        }
        self.cum = {'timeStamp': 0.0}
        # Initial state of the cumulative data; it is updated as the logs are parsed.

        # Persistent data store for variables that must survive server restarts.
        try:
            with open(self.meta['storePath'], 'rb') as store:
                self.cum = pickle.load(store)  # resume from the saved counters
        except (IOError, EOFError, pickle.UnpicklingError):
            self.cum = {
                'timeStamp': 0.0,
                'connectionAccepted': 0,
                'connectionsDropped': 0,
                'requestCount': 0,
                'IoKiloBytesRead': 0,
                'IoKiloBytesWritten': 0
            }
            with open(self.meta['storePath'], 'wb') as store:
                pickle.dump(self.cum, store)
            logger.log('No data in store, service started for the first time.')
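
The store is a plain pickle file, so the counters survive restarts of the agent. A minimal sketch of the round-trip pattern used above (the path matches self.meta['storePath']; the keys and values are illustrative):

import pickle

STORE_PATH = './store.pkl'  # same path as self.meta['storePath']

# First run: seed the store with zeroed cumulative counters.
with open(STORE_PATH, 'wb') as store:
    pickle.dump({'timeStamp': 0.0, 'requestCount': 0}, store)

# A later run: the counters come back after a process restart.
with open(STORE_PATH, 'rb') as store:
    cum = pickle.load(store)
print(cum['requestCount'])  # 0 here; the last saved value in general
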
    def getAccessLogs(self):
        accessLogsList = []
        # Read the previously persisted cumulative metrics.
        with open(self.meta['storePath'], 'rb') as store:
            cumMetrics = pickle.load(store)

        isset = False
        try:
            # Read the file backwards, stopping at lines already seen on the previous run.
            with FileReadBackwards(self.meta['accessLogPath'],
                                   encoding='utf-8') as f:
                for line in f:
                    accessLogData = {}
                    # Parse the line into key-value pairs.
                    for value in line.split('?'):
                        pair = value.split('*')
                        if len(pair) == 2:  # skip the empty tail after the last '?'
                            accessLogData[pair[0].strip()] = pair[1]
                    # Convert time_local to a unix timestamp.
                    timestamp = datetime.strptime(
                        accessLogData['time_local'],
                        '%d/%b/%Y:%H:%M:%S %z').timestamp()
                    if not isset:
                        new_time_stamp = timestamp  # newest line in the log
                        isset = True
                    if timestamp <= cumMetrics['timeStamp']:
                        break  # older lines were read on a previous run
                    accessLogsList.append(accessLogData)
            if isset:  # guard against an empty log file
                self.cum['timeStamp'] = new_time_stamp
                self.extractMetrics(accessLogsList, new_time_stamp)
        except Exception:
            logger.log("Error while parsing the access logs of nginx")
Example #3
def restartNginx():
    # os.popen ignores the command's exit status, so check it via os.system instead.
    if os.system("systemctl restart nginx") == 0:
        return 1
    logger.log("Unable to restart the server.")
    print("Error restarting nginx, please check if you have the nginx server installed.")
    exit()
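
Since the restart reports success or failure only through systemctl's exit status, a hedged follow-up check with `systemctl is-active` can confirm the service actually came back (assumes a systemd host):

import subprocess

# `systemctl is-active --quiet nginx` exits 0 only while the unit is running.
result = subprocess.run(['systemctl', 'is-active', '--quiet', 'nginx'])
if result.returncode == 0:
    print('nginx is running')
else:
    print('nginx is not active; inspect `journalctl -u nginx`')
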
Example #4
    def cpu(self):
        """ cpu utilization percentages """
        try:
            cpuTimes = psutil.cpu_times_percent(1)  # sampled over 1 second
            self.data['userCPU'] = cpuTimes.user + cpuTimes.nice
            self.data['systemCPU'] = cpuTimes.system + cpuTimes.irq + cpuTimes.softirq
            self.data['idleCPU'] = cpuTimes.idle
        except (psutil.Error, AttributeError):  # irq/softirq are Linux-only fields
            logger.log("error accessing cpu percent")
        return
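
psutil.cpu_times_percent() returns a named tuple sampled over the given interval; a quick sketch of the fields cpu() relies on (the fields shown are the Linux ones, which is why the except above also guards AttributeError):

import psutil

cpu = psutil.cpu_times_percent(1)  # percentages over a 1-second window
print(cpu.user, cpu.nice, cpu.system, cpu.idle)  # e.g. 3.1 0.0 1.2 95.4
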
Example #5
    def swap(self):
        """ swap memory details """
        try:
            swapMemory = psutil.swap_memory()  # sizes in bytes; self.val converts the unit
            self.data['totalSwapMemory'] = swapMemory.total / self.val
            self.data['usedSwapMemory'] = swapMemory.used / self.val
            self.data['freeSwapMemory'] = swapMemory.free / self.val
            self.data['percentFreeSwapMemory'] = swapMemory.percent  # psutil reports percent used
        except psutil.Error:
            logger.log("error getting swap memory details")
        return
    def setData(self):
        handles = []
        for collector in self.collectorFunctions:
            handles.append(collector())  # each collector returns a started thread
        for thread in handles:
            thread.join()  # wait for every collector to finish
        try:
            # Persist the cumulative counters so they survive server restarts.
            with open(self.meta['storePath'], 'wb') as store:
                pickle.dump(self.cum, store)
        except IOError:
            logger.log("error dumping the data to store.")
        return self.data
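
setData() joins whatever the collector functions return, so each collector is expected to start its work on a thread and hand back the handle. A minimal sketch of that contract (collector and results are illustrative names, not from the original code):

import threading

results = {}

def collector():
    # Does its work in a background thread and returns the started
    # thread so the caller can join() it, as setData() does.
    def work():
        results['userCPU'] = 1.0  # stand-in for a real psutil call
    thread = threading.Thread(target=work)
    thread.start()
    return thread

handles = [collector()]
for thread in handles:
    thread.join()
print(results)  # {'userCPU': 1.0}
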
Example #7
    def getData(self):
        self.getDataFinished = False
        queue = persistqueue.FIFOSQLiteQueue('./database', auto_commit=True)
        # Drain the queue and send the requests to the backend in chunks
        # of at most self.maxReqSize.
        while queue.size > 0:
            data = []
            while queue.size > 0 and len(data) < self.maxReqSize:
                data.append(queue.get())
            try:
                # POST this chunk to the backend.
                requests.post(
                    'https://software-engineering-308707.el.r.appspot.com/aapi/agent/dyn',
                    json={'data': data},
                    headers={
                        'Authorization': 'Bearer ' + os.environ.get("TOKEN", "")
                    })
                logger.log("Successfully sent the data")
            except requests.RequestException:
                logger.log("There was an api error, request failed")
                for i in data:
                    queue.put(i)  # re-queue the chunk so it is retried later
                break  # avoid spinning while the backend is unreachable
        self.getDataFinished = True
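
The queue drained here is filled elsewhere in the agent; a minimal producer sketch against the same persistqueue database (the payload shape is an assumption):

import persistqueue

# Same on-disk FIFO that getData() drains; auto_commit persists each put().
queue = persistqueue.FIFOSQLiteQueue('./database', auto_commit=True)
queue.put({'requestCount': 42, 'timeStamp': 1620654936.0})  # hypothetical payload
print(queue.size)  # items waiting for the next getData() call
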
Example #8
def setNginxConfig():
    logFormat = """log_format complete 'site*$server_name? server*$host?dest_port*$server_port?  dest_ip*$server_addr?'
                       'src*$remote_addr?  src_ip*$realip_remote_addr? user*$remote_user? '
                       'time_local*$time_local? protocol*$server_protocol? status*$status? '
                       'bytes_out*$bytes_sent? bytes_in*$upstream_bytes_received? '
                       'http_referer*$http_referer? http_user_agent*$http_user_agent? '
                       'nginx_version*$nginx_version? http_x_forwarded_for*$http_x_forwarded_for? '
                       'http_x_header*$http_x_header? uri_query*$query_string? uri_path*$uri? '
                       'http_method*$request_method? upstream_response_time*$upstream_response_time? '
                       'cookie*$http_cookie? request_time*$request_time? category*$sent_http_content_type? https*$https?'
                       'remote_addr*$remote_addr? remote_user*$remote_user?'
                       'request*$request? body_bytes_send*$body_bytes_sent?'
                       'upstream_adder*$upstream_addr? upstream_status*$upstream_status?'
                       'upstream_response_length*$upstream_response_length? upstream_cache_status*$upstream_cache_status?'
                       'http_referer*$http_referer? http_user_agent*$http_user_agent';"""
    accessLog = "access_log /var/log/nginx/access.log complete;"
    try:
        with open('/etc/nginx/nginx.conf', 'r') as file:
            text = file.read()
    except IOError:
        logger.log("Unable to read configuration file")
        print("Please try again.")
        exit()
    # Replace the existing log_format directive with the 'complete' format.
    x = text.find('log_format')
    y = text[x:].find(';')
    text = text[:x] + logFormat + text[x + y + 1:]
    # Replace the existing access_log directive.
    x = text.find('access_log')
    y = text[x:].find(';')
    text = text[:x] + accessLog + text[x + y + 1:]
    with open('input.txt', 'w+') as file:  # staged copy of the edited config
        file.write(text)
    logger.log('nginx.conf set successfully!')
    return 1
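
A malformed log_format line will stop nginx from starting, so it is worth validating the staged file before it replaces /etc/nginx/nginx.conf. A sketch using nginx's built-in syntax check:

import os
import subprocess

staged = os.path.abspath('input.txt')  # the file setNginxConfig() writes
# `nginx -t -c <file>` parses a configuration without applying it.
check = subprocess.run(['nginx', '-t', '-c', staged],
                       capture_output=True, text=True)
if check.returncode != 0:
    print('config rejected:', check.stderr)

Example #9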
    def getHttpConnectionsMetrics(self):
        try:
            stubData = requests.get(self.meta['stubStatusUrl'])
        except requests.RequestException:
            logger.log("Stub Data not received")
            return
        try:
            # Persistent data store: read the counters saved by the previous run.
            with open(self.meta['storePath'], 'rb') as store:
                cumMetrics = pickle.load(store)
            # Regex for parsing the stub_status text.
            stubStatusRegex = re.compile(
                r'^Active connections: (?P<connections>\d+)\s+[\w ]+\n'
                r'\s+(?P<accepts>\d+)'
                r'\s+(?P<handled>\d+)'
                r'\s+(?P<requests>\d+)'
                r'\s+Reading:\s+(?P<reading>\d+)'
                r'\s+Writing:\s+(?P<writing>\d+)'
                r'\s+Waiting:\s+(?P<waiting>\d+)')

            stubMatchings = stubStatusRegex.match(stubData.text)
            httpMetrics = {}
            for metric in ['connections', 'accepts', 'handled', 'requests',
                           'reading', 'writing', 'waiting']:
                httpMetrics[metric] = int(stubMatchings.group(metric))

            # Deltas since the previous collection.
            self.data['connectionAccepted'] = (
                httpMetrics['accepts'] - cumMetrics['connectionAccepted'])
            self.data['connectionsDropped'] = (
                httpMetrics['accepts'] - httpMetrics['handled'] -
                cumMetrics['connectionsDropped'])
            self.data['requestCount'] = (
                httpMetrics['requests'] - cumMetrics['requestCount'])
            # Instantaneous gauges.
            self.data['activeConnections'] = (
                httpMetrics['connections'] - httpMetrics['waiting'])
            self.data['currentConnections'] = httpMetrics['connections']
            self.data['idleConnections'] = httpMetrics['waiting']
            self.data['currentRequest'] = (
                httpMetrics['reading'] + httpMetrics['writing'])
            self.data['readingRequests'] = httpMetrics['reading']  # reading request headers
            self.data['writingRequests'] = httpMetrics['writing']  # writing responses to clients
            # Save the raw cumulative counters for the next collection.
            self.cum['connectionAccepted'] = httpMetrics['accepts']
            self.cum['connectionsDropped'] = (
                httpMetrics['accepts'] - httpMetrics['handled'])
            self.cum['requestCount'] = httpMetrics['requests']
        except Exception:
            logger.log('Unable to parse stubStatusText')
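
For reference, stub_status returns a small fixed-format text page; a worked example of the regex above against a representative response (the counter values are invented):

import re

stubText = ('Active connections: 291 \n'
            'server accepts handled requests\n'
            ' 16630948 16630948 31070465 \n'
            'Reading: 6 Writing: 179 Waiting: 106 \n')

stubStatusRegex = re.compile(
    r'^Active connections: (?P<connections>\d+)\s+[\w ]+\n'
    r'\s+(?P<accepts>\d+)\s+(?P<handled>\d+)\s+(?P<requests>\d+)'
    r'\s+Reading:\s+(?P<reading>\d+)'
    r'\s+Writing:\s+(?P<writing>\d+)'
    r'\s+Waiting:\s+(?P<waiting>\d+)')

m = stubStatusRegex.match(stubText)
print(m.group('accepts'), m.group('handled'), m.group('waiting'))
# 16630948 16630948 106
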
Example #10
    def getWorkers(self):
        # Read the previously persisted cumulative metrics.
        with open(self.meta['storePath'], 'rb') as store:
            cumMetrics = pickle.load(store)
        data = []  # predefine so a failed command leaves an empty list
        try:
            # Find the nginx master and worker processes.
            stream = os.popen("ps xao pid,ppid,command | grep 'nginx[:]'")
            data = stream.read().split('\n')
        except OSError:
            logger.log("unable to execute the workers terminal command")
        processes = []
        zombies = []
        for line in data:
            grp = re.match(
                r'\s*(?P<pid>\d+)\s+(?P<parent_pid>\d+)\s+(?P<command>.+)\s*',
                line)
            if not grp:
                continue
            pid = int(grp.group('pid'))
            try:
                processes.append(psutil.Process(pid))
            except psutil.NoSuchProcess:
                continue  # the process exited between ps and this lookup
        """
        memory info

        nginx.workers.mem.rss
        nginx.workers.mem.vms
        nginx.workers.mem.rss_pct
        """
        rss, vms, pct = 0, 0, 0.0
        for p in processes:
            if p.pid in zombies:
                continue
            try:
                mem_info = p.memory_info()
                rss += mem_info.rss
                vms += mem_info.vms
                pct += p.memory_percent()
            except psutil.ZombieProcess:
                zombies.append(p.pid)  # remember zombies so later loops skip them

        self.data['memory.rss'] = rss
        self.data['memory.vms'] = vms
        self.data['memory.rss_pct'] = pct
        self.data['workersCount'] = len(processes)
        """nginx.workers.fds_count"""
        fds = 0
        for p in processes:
            if p.pid in zombies:
                continue
            try:
                fds += p.num_fds()
            except psutil.ZombieProcess:
                zombies.append(p.pid)
        self.data['workers.fds_count'] = fds
        """
        io

        nginx.workers.io.kbs_r
        nginx.workers.io.kbs_w
        """
        # collect raw data
        read, write = 0, 0
        for p in processes:
            if p.pid in zombies:
                continue
            try:
                io = p.io_counters()
                read += io.read_bytes
                write += io.write_bytes
            except psutil.ZombieProcess:
                zombies.append(p.pid)

        # convert bytes to kilobytes
        read /= 1024
        write /= 1024

        # get deltas against the stored counters and record the metrics
        metric_data = {'IoKiloBytesRead': read, 'IoKiloBytesWritten': write}
        for metric_name, value in metric_data.items():
            value_delta = value - cumMetrics[metric_name]
            self.cum[metric_name] = value
            self.data[metric_name] = value_delta
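
For reference, the `ps xao pid,ppid,command` output consumed by the regex above looks like this (PIDs are made up):

import re

# Hypothetical output of: ps xao pid,ppid,command | grep 'nginx[:]'
sample = ('  812     1 nginx: master process /usr/sbin/nginx\n'
          '  813   812 nginx: worker process\n')

for line in sample.split('\n'):
    grp = re.match(
        r'\s*(?P<pid>\d+)\s+(?P<parent_pid>\d+)\s+(?P<command>.+)\s*', line)
    if grp:
        print(grp.group('pid'), grp.group('parent_pid'), grp.group('command'))
# 812 1 nginx: master process /usr/sbin/nginx
# 813 812 nginx: worker process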