Example #1
import requests
import time

from datetime import datetime

from .pq_es import PandaQEs
from .pq_pandadb import PandaDBPQ

from baseclasses.infdbbaseclass import InfluxDbBaseClass
from baseclasses.cricinfo import CricInfo
from accounting.error_accounting import Errors

from logger import ServiceLogger

_logger = ServiceLogger("pq_influxdb", __file__).logger


class InfluxPQ(InfluxDbBaseClass):
    def __init__(self, path):
        self.cric = CricInfo(path)
        self.url_pq = 'https://atlas-cric.cern.ch/api/atlas/pandaqueue/query/?json'
        super().__init__(path)

    def write_data_backup(self, tdelta):

        date_key = datetime.now()

        es = PandaQEs(self.path)

        s = es.get_ratio(tdelta)

Example #2

from datetime import datetime

import os
from os import sys, path
sys.path.append(path.dirname(path.dirname(path.abspath(__file__))))

from libs.config import Config
from libs.sqlite_cache import Sqlite
from libs.es import Es
from libs.notifications import Notifications
from libs.kibanaXSLS import SlsDocument

from logger import ServiceLogger

BASE_DIR = os.path.dirname(os.path.abspath(__file__))

_logger = ServiceLogger("schedd_monitoring", __file__).logger


def main():
    config = Config(BASE_DIR + '/schedd_configuration/', type='schedd')
    sqlite = Sqlite(BASE_DIR + '/storage/hsm.db', config.XMLconfiguration)

    settings = path.abspath(
        path.join(path.dirname(__file__), '..', 'settings.ini'))

    es = Es(settings)

    schedd_metrics = es.get_schedd_metrics()
    sqlite.scheddhosts_availability(schedd_metrics)
    submissionhosts = sqlite.get_data(type='schedd')

Example #3

import smtplib
from logger import ServiceLogger

_logger = ServiceLogger("notifications", __file__).logger


class Notifications:
    def __init__(self,
                 to,
                 mailserver="localhost",
                 fromemail="*****@*****.**",
                 text='',
                 subject=''):
        self.to = to
        self.mailserver = mailserver
        self.fromemail = fromemail
        self.text = text
        self.subject = subject

    def send_notification_email(self):
        """
        Send notification email
        """
        SERVER = self.mailserver
        FROM = self.fromemail
        TO = self.to
        SUBJECT = self.subject
        TEXT = self.text

        message = """\
From: {0}
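A minimal usage sketch; the recipient, subject and text are placeholders, not values from the original service:

notification = Notifications(to=['admin@example.com'],  # placeholder recipient list
                             subject='Harvester monitoring alert',
                             text='submission host is unavailable')
notification.send_notification_email()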
Example #4
import requests, os

from configparser import ConfigParser
from logger import ServiceLogger

_logger = ServiceLogger("cricinfo", __file__, "ERROR").logger


class CricInfo:

    def __init__(self, path):
        try:
            cfg = ConfigParser()
            cfg.read(path)
            self.cert_file = cfg.get('cert', 'cert_file')
            self.key_file = cfg.get('cert', 'key_file')
            self.ca_certs = cfg.get('cert', 'ca_certs')
        except Exception as ex:
            _logger.error(ex)

    def get_cric_info(self, url):
        try:
            r = requests.get(url, cert=(self.cert_file, self.key_file), verify=self.ca_certs)
            return r.json()
        except Exception as ex:
            _logger.error(ex)
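A minimal usage sketch, assuming a config file whose [cert] section defines cert_file, key_file and ca_certs (settings.ini is a placeholder path); the URL is the PanDA queue endpoint from Example #1:

cric = CricInfo('settings.ini')  # placeholder path to the [cert] config
queues = cric.get_cric_info('https://atlas-cric.cern.ch/api/atlas/pandaqueue/query/?json')
if queues is not None:
    print('PanDA queues returned:', len(queues))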
Example #5
import json
from baseclasses.mysqlbaseclass import MySQLBaseClass
from baseclasses.cricinfo import CricInfo
from logger import ServiceLogger

_logger = ServiceLogger("filters", __file__, "ERROR").logger

class Filters(MySQLBaseClass):
    def __init__(self, path):
        super().__init__(path)

    def read_country_coordinates(self):
        from os import path
        countries = {}
        json_path = path.join(path.dirname(__file__), 'countries.json')

        with open(json_path) as json_file:
            data = json.load(json_file)
            for country in data:
                # register common aliases under the same coordinates
                if country['name'] == 'United Kingdom':
                    countries['UK'] = country['latlng']
                if country['name'] == 'Russia':
                    countries['Russian Federation'] = country['latlng']
                if country['name'] == 'United States':
                    countries['USA'] = country['latlng']
                    countries['United States of America'] = country['latlng']
                countries[country['name']] = country['latlng']
        return countries

    def write_filters(self):
        cric = CricInfo(self.path)
Example #6
from configparser import ConfigParser
from elasticsearch import Elasticsearch
from logger import ServiceLogger

_logger = ServiceLogger("elasticsearch", __file__, "ERROR").logger


class EsBaseClass:

    def __init__(self, path):
        self.connection = self.__make_connection(path=path)

    # private method
    def __make_connection(self, path, verify_certs=True, timeout=2000, max_retries=10,
                          retry_on_timeout=True):
        """
        Create a connection to ElasticSearch cluster
        """
        try:
            cfg = ConfigParser()
            cfg.read(path)
            eslogin = cfg.get('esserver', 'login')
            espasswd = cfg.get('esserver', 'password')
            host = cfg.get('esserver', 'host')
            ca_path = cfg.get('esserver', 'capath')
        except Exception as ex:
            _logger.error(ex)
            print(ex)
        try:
            connection = Elasticsearch(
                ['https://{0}/es'.format(host)],
                http_auth=(eslogin, espasswd),
                verify_certs=verify_certs,
                timeout=timeout,
                max_retries=max_retries,
                retry_on_timeout=retry_on_timeout,
                ca_certs=ca_path)
            return connection
        except Exception as ex:
            _logger.error(ex)
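A minimal usage sketch, assuming a settings.ini whose [esserver] section defines the login, password, host and capath keys read above:

es = EsBaseClass('settings.ini')  # placeholder config path
print(es.connection.info())       # standard client call returning basic cluster metadata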
Example #7
from configparser import ConfigParser
from sqlalchemy import create_engine

from logger import ServiceLogger

_logger = ServiceLogger("mysql", __file__, 'ERROR').logger


class MySQLBaseClass:
    def __init__(self, path):
        self.connection = self.__make_connection(path=path)
        self.path = path

    # private method
    def __make_connection(self, path):
        """
        Create a connection to InfluxDB
        """
        try:
            cfg = ConfigParser()
            cfg.read(path)
            user = cfg.get('mysql', 'login')
            password = cfg.get('mysql', 'password')
            dbname = cfg.get('mysql', 'dbname')
            host = cfg.get('mysql', 'host')

        except Exception as ex:
            _logger.error(ex)
            print(ex)
        try:
            string_connection = 'mysql+pymysql://{0}:{1}@{2}/{3}'.format(
                user, password, host, dbname)
            connection = create_engine(string_connection)
            return connection
        except Exception as ex:
            _logger.error(ex)
Example #8
#!/usr/bin/python
import json
import requests
import time

from logger import ServiceLogger

collector_endpoint = 'http://monit-metrics:10012/'

_logger = ServiceLogger("xsls", __file__).logger

class SlsDocument:
    def __init__(self):
        self.info = {}
        self.data = {}
        self.id = None
        self.producer = 'panda'

    def set_id(self, id_info):
        self.id = id_info

    def set_status(self, availability):
        if availability in (100, '100'):
            self.info['service_status'] = "available"
        elif availability in (0, '0'):
            self.info['service_status'] = "unavailable"
        else:
            self.info['service_status'] = "degraded"

    def set_avail_desc(self, avail_desc):
        self.info['availabilitydesc'] = avail_desc
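A small sketch of the status mapping above; the id and the availability value are arbitrary examples:

doc = SlsDocument()
doc.set_id('harvester_service')    # arbitrary example id
doc.set_status(50)                 # neither 100 nor 0, so the service is degraded
print(doc.info['service_status'])  # prints: degraded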
Example #9
import os
from os import sys, path

sys.path.append(path.dirname(path.dirname(path.abspath(__file__))))

from datetime import datetime

from libs.config import Config
from libs.sqlite_cache import Sqlite
from libs.pandadb import PandaDB
from libs.es import Es
from libs.notifications import Notifications
from libs.kibanaXSLS import SlsDocument

from logger import ServiceLogger

BASE_DIR = os.path.dirname(os.path.abspath(__file__))

_logger = ServiceLogger("harvester_monitoring", __file__).logger


def main():

    config = Config(BASE_DIR + '/configuration/')
    sqlite = Sqlite(BASE_DIR + '/storage/hsm.db', config.XMLconfiguration)

    settings = path.abspath(
        path.join(path.dirname(__file__), '..', 'settings.ini'))

    pandadb = PandaDB(settings)
    es = Es(settings)

    metrics = pandadb.get_db_metrics()
Example #10
from configparser import ConfigParser
from influxdb import InfluxDBClient

from logger import ServiceLogger

_logger = ServiceLogger("influxdb", __file__, 'ERROR').logger

class InfluxDbBaseClass:

    def __init__(self, path):
        self.connection = self.__make_connection(path=path)
        self.path = path

    # private method
    def __make_connection(self, path):
        """
        Create a connection to InfluxDB
        """
        try:
            cfg = ConfigParser()
            cfg.read(path)
            user = cfg.get('influxdb', 'login')
            password = cfg.get('influxdb', 'password')
            dbname = cfg.get('influxdb', 'dbname')
            host = cfg.get('influxdb', 'host')
            port = cfg.get('influxdb', 'port')

        except Exception as ex:
            _logger.error(ex)
            print(ex)
        try:
            connection = InfluxDBClient(host, port, user, password, dbname)
            return connection
        except Exception as ex:
            _logger.error(ex)
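The cfg.get calls above imply an [influxdb] section in the config file; a small sketch that writes a placeholder file with those exact keys (all values are assumptions):

from configparser import ConfigParser

cfg = ConfigParser()
cfg['influxdb'] = {
    'login': 'monitoring',  # placeholder credentials
    'password': 'secret',
    'dbname': 'harvester',
    'host': 'localhost',
    'port': '8086',         # default InfluxDB HTTP port
}
with open('settings.ini', 'w') as f:  # placeholder path
    cfg.write(f)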
Example #11
import xml.etree.ElementTree as ET
import os

from os import sys, path
sys.path.append(path.dirname(path.dirname(path.abspath(__file__))))
from logger import ServiceLogger

_logger = ServiceLogger("configuration", __file__).logger


class Config:

    def __init__(self, path, type='hsm'):
        if type == 'hsm':
            self.XMLconfiguration = self.__read_harvester_configs_xml(path)
        elif type == 'schedd':
            self.XMLconfiguration = self.__read_schedd_configs_xml(path)

    # private method
    def __read_harvester_configs_xml(self, path):
        """
        Read harvester monitoring metrics from XML files
        """
        try:
            configuration = {}
            for file in os.listdir(path):
                if file.endswith(".xml"):
                    tree = ET.parse(os.path.join(path, file))
                    root = tree.getroot()
                    for harvesterid in root:
                        configuration[harvesterid.attrib['harvesterid']] = {}
Example #12
import getopt
import json
import re
import socket
import subprocess

import cx_Oracle
import numpy as np
import psutil
import requests

from os import sys
from datetime import datetime
from configparser import ConfigParser
from logger import ServiceLogger

_logger = ServiceLogger("cron", __file__).logger

class DateTimeEncoder(json.JSONEncoder):
    """JSON encoder that serializes datetime objects as ISO 8601 strings."""
    def default(self, o):
        if isinstance(o, datetime):
            return o.isoformat()
        # defer to the base class otherwise (raises TypeError)
        return super().default(o)

def cpu_info():
    cpu_times = psutil.cpu_times()
    cpu_usage_list = []
    # five samples at two-second intervals: ~10 seconds of per-CPU usage
    for _ in range(5):
        cpu_usage_list.append(psutil.cpu_percent(interval=2, percpu=True))
    return cpu_times, cpu_usage_list

def memory_info():
    memory_virtual = psutil.virtual_memory()
    memory_swap = psutil.swap_memory()
    return memory_virtual, memory_swap

def disk_info(disk=''):
    if disk == '':
        full_path = '/'
    else:
        full_path = '/' + disk
Example #13
from elasticsearch_dsl import Search, Q
from datetime import datetime, timedelta

from baseclasses.esbaseclass import EsBaseClass

from logger import ServiceLogger

_logger = ServiceLogger("es_gahp", __file__).logger


class GahpMonitoringEs(EsBaseClass):
    def __init__(self, path):
        super().__init__(path)

    def get_info_workers(self, type, tdelta=60, time='submittime'):

        connection = self.connection

        date_utc = datetime.utcnow()
        date_from = date_utc - timedelta(minutes=tdelta)
        #genes_filter = Q('bool', must=[Q('terms', status=['failed', 'finished', 'canceled', 'missed'])])
        s = Search(using=connection, index='atlas_harvesterworkers-*')
        # round both window edges down to the nearest 10 minutes
        # (see the standalone sketch after this example)
        s = s.filter(
            'range', **{
                time: {
                    'gte': date_from.strftime("%Y-%m-%dT%H:%M")[:-1] + '0:00',
                    'lt': datetime.utcnow().strftime("%Y-%m-%dT%H:%M")[:-1] + '0:00'
                }
            })
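A standalone sketch of the rounding idiom used in the range filter above: dropping the last digit of the minutes field truncates the timestamp down to the nearest 10 minutes:

from datetime import datetime

ts = datetime(2021, 3, 5, 13, 47)      # arbitrary example time
stamp = ts.strftime("%Y-%m-%dT%H:%M")  # '2021-03-05T13:47'
print(stamp[:-1] + '0:00')             # '2021-03-05T13:40:00'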

Example #14

import cx_Oracle

from configparser import ConfigParser
from logger import ServiceLogger

_logger = ServiceLogger("oracledb", __file__, 'ERROR').logger


class OracleDbBaseClass:
    def __init__(self, path):
        self.connection = self.__make_connection(path)
        self.path = path

    # private method
    def __make_connection(self, path):
        """
        Create a database connection to the PanDA database
        """
        try:
            cfg = ConfigParser()
            cfg.read(path)
            dbuser = cfg.get('pandadb', 'login')
            dbpasswd = cfg.get('pandadb', 'password')
            description = cfg.get('pandadb', 'description')
        except Exception as ex:
            _logger.error(ex)
        try:
            connection = cx_Oracle.connect(dbuser, dbpasswd, description)
            return connection
        except Exception as ex:
            _logger.error(ex)
Example #15
import json
from logger import ServiceLogger
from baseclasses.oracledbbaseclass import OracleDbBaseClass

_logger = ServiceLogger("pandadb", __file__).logger


class PandaDB(OracleDbBaseClass):
    def __init__(self, path):
        super().__init__(path)

    def get_db_metrics(self):
        """
        Get metrics from PandaDB
        """
        try:
            connection = self.connection
            metrics = {}

            query = """
            SELECT t.harvester_id, t.harvester_host, t.CREATION_TIME, t.METRICS FROM ( 
            SELECT harvester_id, harvester_host, MAX(creation_time) AS CREATION_TIME
            FROM atlas_panda.harvester_metrics
            GROUP BY harvester_id, harvester_host) x 
            JOIN atlas_panda.harvester_metrics t ON x.harvester_id = t.harvester_id
            AND x.harvester_host = t.harvester_host AND x.CREATION_TIME = t.CREATION_TIME
            """

            results = self.__read_query(query, connection)

            for row in results:
Example #16
from elasticsearch_dsl import Search
from datetime import datetime
from logger import ServiceLogger
from baseclasses.esbaseclass import EsBaseClass

_logger = ServiceLogger("es", __file__).logger


class Es(EsBaseClass):
    def __init__(self, path):
        super().__init__(path)

    def get_workers_stats(self):
        """
        Get workers stats for harvester hosts
        """
        connection = self.connection

        s = Search(using=connection, index='atlas_harvesterworkers-*')[:0]

        #s = s.exclude('terms', status=['missed'])

        s.aggs.bucket('harvesterid', 'terms', field='harvesterid.keyword', size=10000) \
            .metric('max_submittime', 'max', field='submittime') \
            .metric('min_submittime', 'min', field='submittime') \
            .bucket('harvesterhost', 'terms', field='harvesterhost.keyword', order={'max_hostsubmittime': 'desc'},
                    size=10000) \
            .metric('max_hostsubmittime', 'max', field='submittime')

        s = s.execute()
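A hedged sketch of how the nested aggregation could be consumed after execute(); the bucket and metric names follow the aggregation definition above:

        # walk the nested buckets: harvesterid -> harvesterhost
        for harvester in s.aggregations.harvesterid.buckets:
            for host in harvester.harvesterhost.buckets:
                print(harvester.key, host.key, host.max_hostsubmittime.value)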
Example #17
import time
import copy

from datetime import datetime

from baseclasses.infdbbaseclass import InfluxDbBaseClass
from accounting.error_accounting import Errors

from .gahp_es import GahpMonitoringEs
from logger import ServiceLogger

_logger = ServiceLogger("influxdb_gahp",__file__).logger


class InfluxDbGahp(InfluxDbBaseClass):

    def __init__(self, path):
        super().__init__(path)

    def write_data_tmp(self, tdelta):

        es = GahpMonitoringEs(self.path)

        tmp_harvester_schedd = es.get_info_workers(tdelta=tdelta, type="gahp", time='submittime')
        harvester_schedd = copy.deepcopy(tmp_harvester_schedd)
        errors_object = Errors('patterns.txt')

        harvester_schedd_errors = {}

        for schedd in tmp_harvester_schedd:
            harvester_schedd_errors[schedd] = {}