Example #1
	def __init__(self, query="", data="", redis_key="", redis_conn_id="redis_hook_6", mysql_conn_id='mysql_uat', *args, **kwargs):
		super(MySqlLoaderOperator, self).__init__(*args, **kwargs)
		self.mysql_conn_id = mysql_conn_id
		self.sql = query
		self.data = data
		self.redis_key = redis_key
		self.redis_hook = RedisHook(redis_conn_id=redis_conn_id)
		self.redis_conn_id = redis_conn_id
    def execute(self, context):
        data = self.extract()
        timestamp = int(time.time())
        payload_dict = {
            "DAG_name": context.get("task_instance_key_str").split('_')[0],
            "task_name": context.get("task_instance_key_str"),
            "payload": data,
            "timestamp": timestamp
        }
        redis = RedisHook(redis_conn_id="redis")
        conn = redis.get_conn()
        # `identifier` (like `extract`) is defined elsewhere in this operator and is not shown in the excerpt.
        redis.add_event(identifier, timestamp, payload_dict)
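add_event is not a method of the stock Airflow Redis hook, so it must come from a customized RedisHook. A minimal sketch of what such a helper might look like, assuming events are simply appended, JSON-encoded, to a Redis list named after the identifier (the body is an assumption, not the project's actual implementation):

import json

def add_event(self, identifier, timestamp, payload_dict):
    # Hypothetical method on the custom RedisHook: append the payload to a list
    # keyed by `identifier`; the timestamp is already carried inside payload_dict,
    # so the extra argument only mirrors the call shown above.
    conn = self.get_conn()
    conn.rpush(identifier, json.dumps(payload_dict))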
Example #3
    def test(self):
        # in this example, put your test_plugin/test.html template at airflow/plugins/templates/test_plugin/test.html
        redis_hook_4 = RedisHook(redis_conn_id="redis_hook_4")
        nw_keys = redis_hook_4.get_keys("nw_agg_nocout_*")
        data_to_page = []
        attributes = []

        for key in nw_keys:
            data = redis_hook_4.rget(key)
            for slot in data:
                slot = eval(slot)
                for k, v in enumerate(slot):
                    device = eval(v)
                    data_to_page.append(device)

        return self.render("rules_plugin/rules.html",
                           attributes=attributes,
                           data=data_to_page)
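get_keys and rget are likewise custom extensions of the Redis hook used across these examples. A minimal sketch of how they could be implemented, assuming get_keys wraps the Redis KEYS command and rget returns a whole Redis list (both bodies are assumptions):

def get_keys(self, pattern):
    # Hypothetical: return all keys matching the glob-style pattern.
    return self.get_conn().keys(pattern)

def rget(self, key):
    # Hypothetical: return every element of the Redis list stored at `key`.
    return self.get_conn().lrange(key, 0, -1)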
Example #4
    def test(self):
        # in this example, put your test_plugin/test.html template at airflow/plugins/templates/test_plugin/test.html
        redis_hook_5 = RedisHook(redis_conn_id="redis_hook_5")

        pl_states = eval(redis_hook_5.get("all_devices_state"))
        rta_states = eval(redis_hook_5.get("all_devices_state_rta"))
        last_down_states = eval(redis_hook_5.get("all_devices_down_state"))

        pl_list = []
        rta_list = []
        down_list = []

        data_to_page = []
        attributes = []

        for device in pl_states:
            pl = {}
            pl['device_name'] = device
            pl['state'] = pl_states.get(device).get('state')
            pl['since'] = pl_states.get(device).get('since')
            pl_list.append(pl.copy())

        for device in rta_states:
            rta = {}
            rta['device_name'] = device
            rta['state'] = rta_states.get(device).get('state')
            rta['since'] = rta_states.get(device).get('since')
            rta_list.append(rta.copy())

        for device in last_down_states:
            down = {}
            down['device_name'] = device
            down['state'] = last_down_states.get(device).get('state')
            down['since'] = last_down_states.get(device).get('since')
            down_list.append(down.copy())

        return self.render("rules_plugin/prev_states.html",
                           attributes=attributes,
                           rta=rta_list,
                           pl=pl_list,
                           down=down_list)
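These `test` methods follow Airflow 1.x's web-plugin pattern (the `test_plugin/test.html` comment comes from the stock Airflow example). A minimal sketch of how such a view is typically exposed and registered, with hypothetical class names:

from airflow.plugins_manager import AirflowPlugin
from flask_admin import BaseView, expose


class RulesView(BaseView):          # hypothetical view class
    @expose('/')
    def test(self):
        # pull data from Redis as in the examples above, then render the template
        return self.render("rules_plugin/rules.html", attributes=[], data=[])


class RulesPlugin(AirflowPlugin):   # hypothetical plugin class
    name = "rules_plugin"
    admin_views = [RulesView(category="Plugins", name="Rules")]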
Example #5
class MySqlLoaderOperator(BaseOperator):
	"""
	Transfers Memcache or Redis data to a specific MySQL table.

	:param mysql_conn_id: reference to a specific MySQL database
	:type mysql_conn_id: string
	:param sql: the SQL code to be executed
	:type sql: Can receive a str representing a SQL statement,
		a list of str (SQL statements), or a reference to a template file.
		Template references are recognized by a str ending in '.sql'
	"""


	@apply_defaults
	def __init__(self, query="", data="", redis_key="", redis_conn_id="redis_hook_6", mysql_conn_id='mysql_uat', *args, **kwargs):
		super(MySqlLoaderOperator, self).__init__(*args, **kwargs)
		self.mysql_conn_id = mysql_conn_id
		self.sql = query
		self.data = data
		self.redis_key = redis_key
		self.redis_hook = RedisHook(redis_conn_id=redis_conn_id)
		self.redis_conn_id = redis_conn_id
	def execute(self, context):
		hook = MySqlHook(mysql_conn_id=self.mysql_conn_id)
		if self.data == "" and self.redis_key != "" and self.redis_conn_id != "":
			self.data = eval(self.redis_hook.get(self.redis_key))
			if len(self.data) <= 0:
				logging.error("Not inserting data as the provided key is empty")
				return 1
		try:
			conn = hook.get_conn()
			cursor = conn.cursor()
			cursor.executemany(self.sql, self.data)
			conn.commit()
			cursor.close()
			conn.close()
		except Exception as e:
			logging.error("Exception while loading data into MySQL: %s" % e)
			traceback.print_exc()
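A minimal usage sketch of the operator inside a DAG; the task id, Redis key and DAG variable are illustrative, and the query is assumed to be an executemany-compatible INSERT, for example one built from the INSERT_HEADER/INSERT_TAIL constants shown in Example #6:

load_task = MySqlLoaderOperator(
    task_id="load_aggregated_rows",                  # hypothetical task id
    query=(INSERT_HEADER % "nocout") + INSERT_TAIL,  # constants from Example #6; "nocout" is a hypothetical schema name
    redis_key="aggregated_utilization",              # hypothetical key holding a list of row dicts
    redis_conn_id="redis_hook_6",
    mysql_conn_id="mysql_uat",
    dag=utilization_kpi_dag,                         # hypothetical: any of the DAGs defined in these modules
)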
Example #6
    'owner': 'wireless',
    'depends_on_past': False,
    'start_date': datetime.now() - timedelta(minutes=2),
    'email': ['*****@*****.**'],
    'email_on_failure': False,
    'email_on_retry': False,
    'retries': 1,
    'retry_delay': timedelta(minutes=1),
    'provide_context': True,
    'catchup': False,
    # 'queue': 'bash_queue',
    # 'pool': 'backfill',
    # 'priority_weight': 10,
    # 'end_date': datetime(2016, 1, 1),
}
redis_hook_util_10 = RedisHook(redis_conn_id="redis_hook_util_10")
memc_con_cluster = MemcacheHook(memc_cnx_id='memc_cnx')
vrfprv_memc_con = MemcacheHook(memc_cnx_id='vrfprv_memc_cnx')
pub_memc_con = MemcacheHook(memc_cnx_id='pub_memc_cnx')
redis_hook_static_5 = RedisHook(redis_conn_id="redis_hook_5")

INSERT_HEADER = "INSERT INTO %s.performance_utilization"
INSERT_TAIL = """
(machine_name,current_value,service_name,avg_value,max_value,age,min_value,site_name,data_source,critical_threshold,device_name,severity,sys_timestamp,ip_address,warning_threshold,check_timestamp,refer ) 
values 
(%(machine_name)s,%(current_value)s,%(service_name)s,%(avg_value)s,%(max_value)s,%(age)s,%(min_value)s,%(site_name)s,%(data_source)s,%(critical_threshold)s,%(device_name)s,%(severity)s,%(sys_timestamp)s,%(ip_address)s,%(warning_threshold)s,%(check_timestamp)s,%(refer)s)

 """

UPDATE_HEADER = "INSERT INTO %s.performance_utilizationstatus"
UPDATE_TAIL = """
Example #7
    def test(self):
        # in this example, put your test_plugin/test.html template at airflow/plugins/templates/test_plugin/test.html
        redis_hook_6 = RedisHook(redis_conn_id="redis_hook_6")
        redis_hook_7 = RedisHook(redis_conn_id="redis_hook_7")
        redis_hook_10 = RedisHook(redis_conn_id="redis_hook_util_10")
        ulissue_keys = redis_hook_6.get_keys("aggregated_*")
        provis_keys = redis_hook_7.get_keys("aggregated_*")
        utilization_keys = redis_hook_10.get_keys("aggregated_*")
        data_to_page_ul = []
        data_to_page_provis = []
        data_to_page_util = []
        attributes = []
        combined_data = []

        for key in ulissue_keys:
            data = eval(redis_hook_6.get(key))
            for device_dict in data:
                data_to_page_ul.append(device_dict)
        print "Crossed 1"
        for key in provis_keys:
            data = eval(redis_hook_7.get(key))
            for device_dict in data:
                data_to_page_provis.append(device_dict)
        print "Crossed 2"
        for key in utilization_keys:
            data = eval(redis_hook_10.get(key))
            for device_dict in data:
                data_to_page_util.append(device_dict)
        print len(data_to_page_util)
        print "Crossed 3"

        data_to_page_ul.extend(data_to_page_provis)
        data_to_page_ul.extend(data_to_page_util)
        print "Total Records : %s" % len(data_to_page_ul)
        return self.render("rules_plugin/rules.html",
                           attributes=attributes,
                           data=data_to_page_ul)
Example #8
###############Utility functions for Provision################
import re
from airflow.models import Variable
from airflow.hooks import RedisHook
import time
import logging
import traceback
from airflow.hooks import MemcacheHook
import math

redis_hook_2 = RedisHook(
    redis_conn_id="redis_hook_2")  #DO NOT FLUSH THIS  DB !!!
redis_hook_7 = RedisHook(redis_conn_id="redis_hook_7")

rules = eval(Variable.get('rules'))
ul_tech = eval(Variable.get('provision_kpi_technologies'))
memc_con = MemcacheHook(memc_cnx_id='memc_cnx')
operators = eval(Variable.get('operators'))
service_state_type = ['warning', 'critical']
all_device_type_age_dict = {}

for techs_bs in ul_tech:
    techs_ss = ul_tech.get(techs_bs)
    all_device_type_age_dict[techs_ss] = eval(
        redis_hook_2.get("kpi_provis_prev_state_%s" % (techs_ss)))
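These modules eval() strings pulled from Airflow Variables and Redis throughout; when the payloads are plain Python literals, ast.literal_eval is a safer drop-in that cannot execute arbitrary code. A sketch of such a helper (not part of the original project):

import ast

def safe_literal(raw, default=None):
    # Hypothetical helper: parse a string holding a Python literal (dict/list/tuple)
    # without executing code, falling back to `default` on missing or malformed input.
    try:
        return ast.literal_eval(raw) if raw else default
    except (ValueError, SyntaxError):
        logging.error("Could not parse literal payload")
        return default

# e.g. all_device_type_age_dict[techs_ss] = safe_literal(
#     redis_hook_2.get("kpi_provis_prev_state_%s" % techs_ss), {})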


#############################################SS UL ISSUE###############################################################################################
def calculate_wimax_ss_provision(wimax_ul_rssi, wimax_dl_rssi, wimax_dl_cinr,
                                 wimax_ss_ptx_invent):
Example #9
    # 'priority_weight': 10,
    # 'end_date': datetime(2016, 1, 1),
}

Q_PUBLIC = "poller_queue"
Q_PRIVATE = "formatting_queue"
Q_OSPF = "poller_queue"
Q_PING = "poller_queue"

PARENT_DAG_NAME = "UL_ISSUE_KPI"
#3-59/5 * * * *
# 3,8,13,18,23,28,33,38,43,48,53,58 * * * *
ul_issue_dag = DAG(dag_id=PARENT_DAG_NAME,
                   default_args=default_args,
                   schedule_interval='*/5 * * * *')
redis_hook_6 = RedisHook(redis_conn_id="redis_hook_6")
redis_hook_2 = RedisHook(redis_conn_id="redis_hook_2")
technologies = eval(Variable.get('ul_issue_kpi_technologies'))
machines = eval(Variable.get("system_config_o1"))
devices = eval(Variable.get('hostmk.dict.site_mapping'))
all_sites = []


def init_kpi():
    logging.info("TODO : Check All vars and Airflow ETL Environment here")
    redis_hook_6.flushall("*")
    logging.info("Flushed all in redis_hook_6 connection")


def get_previous_device_states(device_type):
    prev_state = eval(redis_hook_2.get("kpi_ul_prev_state_%s" % device_type))
Example #10
    'start_date': datetime.now() - timedelta(minutes=2),
    'email': ['*****@*****.**'],
    'email_on_failure': False,
    'email_on_retry': False,
    'retries': 1,
    'retry_delay': timedelta(minutes=1),
    'provide_context': True,
    'catchup': False,
    #'queue': 'bash_queue',
    #'pool': 'backfill',
    #'priority_weight': 10,
    #'end_date': datetime(2016, 1, 1),
}

#redis_hook_4 = RedisHook(redis_conn_id="redis_hook_4") #number specifies the DB in Use
redis_cnx = RedisHook(redis_conn_id="redis_hook_7")
memc_cnx = MemcacheHook(memc_cnx_id = 'memc_cnx')


def process_utilization_kpi(parent_dag_name,
                            child_dag_name,
                            start_date,
                            schedule_interval,
                            celery_queue,
                            technology,
                            devices,
                            attributes):

    site_names = devices.keys()
    machine_names = list(set([each_site.split('_')[0] for each_site in devices.keys()]))
Example #11
###############Utility functions for UL_issue################
import re
from airflow.models import Variable
from airflow.hooks import RedisHook
import time
import logging
import traceback
from airflow.hooks import MemcacheHook
import math



redis_hook_2 = RedisHook(redis_conn_id="redis_hook_2") #DO NOT FLUSH THIS  DB !!!
redis_hook_6 = RedisHook(redis_conn_id="redis_hook_6")
redis_hook_4 = RedisHook(redis_conn_id="redis_hook_4")
rules = eval(Variable.get('rules'))
ul_tech = eval(Variable.get('ul_issue_kpi_technologies'))
memc_con = MemcacheHook(memc_cnx_id = 'memc_cnx')
operators = eval(Variable.get('operators'))
service_state_type = ['warning','critical']
all_device_type_age_dict ={}

for techs_bs in ul_tech:
	techs_ss = ul_tech.get(techs_bs)
	all_device_type_age_dict[techs_ss] = eval(redis_hook_2.get("kpi_ul_prev_state_%s"%(techs_ss)))
	all_device_type_age_dict[techs_bs] = eval(redis_hook_2.get("kpi_ul_prev_state_%s"%(techs_bs)))

#############################################SS UL ISSUE###############################################################################################
def calculate_wimax_ss_ul_issue(wimax_dl_intrf,wimax_ul_intrf):
	
Example #12
def create_prev_state(**kwargs):
    
    
    #key = ospf1_slave_1_last_pl_info
    data = {}
    data_down = {}
    for conn_id in [1,2,3,4,5,6,7]:
        redis_hook = RedisHook(redis_conn_id="redis_prev_state_%s"%conn_id)
        if conn_id <= 5:
            for site in [1,2,3,4,5,6,7,8]:
                data_redis_down = redis_hook.hgetall("ospf%s_slave_%s_device_down"%(conn_id,site))
                key = "ospf%s_slave_%s_down"%(conn_id,site)
                data_down[key] = data_redis_down
        elif conn_id == 6:
            for site in [1,2,3,4,5,6]:
                data_redis_prv_down = redis_hook.hgetall("vrfprv_slave_%s_device_down"%(site))
                key = "ospf%s_slave_%s_down"%(conn_id,site)
                data_down[key] = data_redis_prv_down
        elif conn_id == 7:
            for site in [1]:
                data_redis_pub_down = redis_hook.hgetall("pub_slave_%s_device_down"%(site))
                key = "ospf%s_slave_%s_down"%(conn_id,site)
                data_down[key] = data_redis_pub_down

        for ds in ['pl','rta']:
            if conn_id <= 5:
                for site in [1,2,3,4,5,6,7,8]:
                    data_redis = redis_hook.hgetall("ospf%s_slave_%s_last_%s_info"%(conn_id,site,ds))
                    key = "ospf%s_slave_%s_%s"%(conn_id,site,ds)
                    data[key] = data_redis
            elif conn_id == 6:
                for site in [1,2,3,4,5,6]:
                    data_redis_prv = redis_hook.hgetall("vrfprv_slave_%s_last_%s_info"%(site,ds))
                    key = "ospf%s_slave_%s_%s"%(conn_id,site,ds)
                    data[key] = data_redis_prv
            elif conn_id == 7:
                for site in [1]:
                    data_redis_pub = redis_hook.hgetall("pub_slave_%s_last_%s_info"%(site,ds))
                    key = "ospf%s_slave_%s_%s"%(conn_id,site,ds)
                    data[key] = data_redis_pub
                
    machine_state_list_pl = {}
    machine_state_list_rta = {}
    machine_state_list_down = {}
    host_mapping = {}

##########################################################################################
    logging.info("Creating IP to Host Mapping from HOST to IP mapping")
    ip_mapping = get_ip_host_mapping()
    for host_name,ip in ip_mapping.iteritems():
        host_mapping[ip] = host_name

   
    logging.info("Mapping Completed for %s hosts"%len(host_mapping))
    ###########################################################################################
    for key in data:
        site_data = data.get(key)
        #logging.info("FOR  %s is %s"%(key,len(key)))
        for device in site_data:
            host = host_mapping.get(device)
            if "pl" in key: 
                machine_state_list_pl[host] = {'state':eval(site_data.get(device))[0],'since':eval(site_data.get(device))[1]}
            elif "rta" in key:
                machine_state_list_rta[host] = {'state':eval(site_data.get(device))[0],'since':eval(site_data.get(device))[1]}

    i=0
    for key in data_down:

        site_data_down = data_down.get(key)
        #print "%s ===== %s"%(key,len(site_data_down))
        #logging.info("FOR  %s is %s"%(key,len(key)))
        for device in site_data_down:

            if site_data_down.get(device) is not None and site_data_down.get(device) != {}:
                try:
                    machine_state_list_down[device] = {'state':eval(site_data_down.get(device))[0],'since':eval(site_data_down.get(device))[1]}
                except Exception:
                    pass
                    #logging.info("Device not found in the ")
                    #print site_data_down.get(device)
                    #traceback.print_exc()
            else:
                logging.info("Data not present for device %s " % (device))


    logging.info("Total rejected : %s"%(i))
   # print data_down
    print len(machine_state_list_pl),len(machine_state_list_rta)

    main_redis_key = "all_devices_state"
    rta = "all_devices_state_rta"
    down_key = "all_devices_down_state"

    redis_hook_5.set(main_redis_key,str(machine_state_list_pl))
    redis_hook_5.set(rta,str(machine_state_list_rta))
    redis_hook_5.set(down_key,str(machine_state_list_down))
    logging.info("3 keys generated in redis")
Example #13
    'depends_on_past': False,
    'start_date': datetime.now() - timedelta(minutes=2),
    'email': ['*****@*****.**'],
    'email_on_failure': False,
    'email_on_retry': False,
    'retries': 1,
    'retry_delay': timedelta(minutes=1),
    'provide_context': True,
    'catchup': False,
    # 'queue': 'bash_queue',
    # 'pool': 'backfill',
    # 'priority_weight': 10,
    # 'end_date': datetime(2016, 1, 1),
}

redis_hook_4 = RedisHook(
    redis_conn_id="redis_hook_4")  #number specifies the DB in Use
redis_hook_5 = RedisHook(redis_conn_id="redis_hook_5")
redis_hook_6 = RedisHook(redis_conn_id="redis_hook_6")

DEBUG_MODE = False
down_devices = []

memc_con_cluster = MemcacheHook(memc_cnx_id='memc_cnx')
vrfprv_memc_con = MemcacheHook(memc_cnx_id='vrfprv_memc_cnx')
pub_memc_con = MemcacheHook(memc_cnx_id='pub_memc_cnx')
set_dependency_for_ss_on_all_machines = False
INSERT_HEADER = "INSERT INTO %s.performance_utilization"
INSERT_TAIL = """
(machine_name,current_value,service_name,avg_value,max_value,age,min_value,site_name,data_source,critical_threshold,device_name,severity,sys_timestamp,ip_address,warning_threshold,check_timestamp,refer ) 
values 
(%(machine_name)s,%(current_value)s,%(service_name)s,%(avg_value)s,%(max_value)s,%(age)s,%(min_value)s,%(site_name)s,%(data_source)s,%(critical_threshold)s,%(device_name)s,%(severity)s,%(sys_timestamp)s,%(ip_address)s,%(warning_threshold)s,%(check_timestamp)s,%(refer)s)
Example #14
def calculate_events(parent_dag_name, child_dag_name, start_date, schedule_interval):
    config = eval(Variable.get('system_config'))
    memc_con = MemcacheHook(memc_cnx_id = 'memc_cnx')
    event_rules = eval(Variable.get('event_rules'))
    nw_result_keys = eval(Variable.get("network_memc_key"))
    sv_result_keys = eval(Variable.get("service_memc_key"))
    redis_hook_5 = RedisHook(redis_conn_id="redis_hook_5")
    redis_hook_network_alarms = RedisHook(redis_conn_id="redis_hook_network_alarms")
    all_devices_states = get_previous_device_states(memc_con,redis_hook_5)
    
    events_subdag = DAG(
            dag_id="%s.%s" % (parent_dag_name, child_dag_name),
            schedule_interval=schedule_interval,
            start_date=start_date,
        )

    def extract_and_distribute_nw(**kwargs):
        print("Finding Events for network")
        memc_key = kwargs.get('params').get('memc_key')
        network_data = memc_con.get(memc_key)
        if len(network_data) > 0:
            all_pl_rta_trap_list = get_device_alarm_tuple(network_data, all_devices_states, event_rules)
            if len(all_pl_rta_trap_list) > 0:
                machine_name = memc_key.split("_")[1]
                redis_key = 'queue:network:snmptt:%s' % machine_name
                try:
                    redis_hook_network_alarms.rpush(redis_key, all_pl_rta_trap_list)
                except Exception:
                    logging.error("Unable to insert data to redis.")
            else:
                logging.info("No Traps received")
        else:
            logging.info("No Data Found in memC")

    def extract_and_distribute_sv(**kwargs):
        print("Finding Events for service")

    #TODO: We can parallelize this operator as it runs for only one machine
    update_refer = PythonOperator(
            task_id="update_redis_refer",
            provide_context=False,
            python_callable=update_device_state_values,
            dag=events_subdag
            )

    if len(nw_result_keys) > 0:
        for key in nw_result_keys:
            slot = "_".join(key.split("_")[1:6])
            event_nw = PythonOperator(
                task_id="discover_events_nw_%s_" % (slot),
                provide_context=True,
                python_callable=extract_and_distribute_nw,
                params={"memc_key": key},
                dag=events_subdag
            )
            event_nw >> update_refer

    if len(sv_result_keys) > 0:
        for sv_keys in sv_result_keys:
            slot = "_".join(sv_keys.split("_")[1:6])
            event_sv = PythonOperator(
                task_id="discover_events_sv_%s" % (slot),
                provide_context=True,
                python_callable=extract_and_distribute_sv,
                params={"memc_key": sv_keys},
                dag=events_subdag
            )
            event_sv >> update_refer

    return events_subdag
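calculate_events returns a subdag, so in the parent DAG file it would typically be attached with a SubDagOperator. A minimal wiring sketch; the parent DAG object and child name are assumptions:

from airflow.operators.subdag_operator import SubDagOperator

CHILD_DAG_NAME = "calculate_events"        # hypothetical child name
events_task = SubDagOperator(
    task_id=CHILD_DAG_NAME,
    subdag=calculate_events(PARENT_DAG_NAME, CHILD_DAG_NAME,
                            default_args['start_date'], parent_dag.schedule_interval),
    dag=parent_dag,                        # hypothetical parent DAG object
)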
Example #15
    # 'pool': 'backfill',
    # 'priority_weight': 10,
    # 'end_date': datetime(2016, 1, 1),
}

Q_PUBLIC = "poller_queue"
Q_PRIVATE = "formatting_queue"
Q_OSPF = "poller_queue"
Q_PING = "poller_queue"

PARENT_DAG_NAME = "UTILIZATION_KPI"
utilization_kpi_dag = DAG(dag_id=PARENT_DAG_NAME,
                          default_args=default_args,
                          schedule_interval='4-59/5 * * * *')

redis_hook_util_10 = RedisHook(redis_conn_id="redis_hook_util_10")
redis_hook_2 = RedisHook(redis_conn_id="redis_hook_2")

technologies = eval(Variable.get('utilization_kpi_technologies'))
machines = eval(Variable.get("system_config_no_o1"))
devices = eval(Variable.get('hostmk.dict.site_mapping'))
attributes = eval(Variable.get('utilization_kpi_attributes'))

all_sites = []


def init_kpi():
    logging.info("TODO : Check All vars and Airflow ETL Environment here")
    redis_hook_util_10.flushall("*")
    logging.info("Flushed all in redis_hook_util_10 connection")
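init_kpi is a plain callable, so it would normally be scheduled at the head of the DAG with a PythonOperator, as the other examples do. A minimal sketch with an illustrative task id:

init_task = PythonOperator(
    task_id="init_utilization_kpi",   # hypothetical task id
    python_callable=init_kpi,
    provide_context=False,
    dag=utilization_kpi_dag,
)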
Example #16
###############Utility functions for format################

import re
from airflow.models import Variable
from os import listdir
from os.path import isfile, join
import time
import logging
import traceback
import math
from airflow.hooks import MemcacheHook
from airflow.hooks import RedisHook

redis_hook_5 = RedisHook(redis_conn_id="redis_hook_5")
memc_con = MemcacheHook(memc_cnx_id='memc_cnx')
hostmk = Variable.get("hostmk.dict")
hostmk = eval(hostmk)


def get_threshold(perf_data):
    """
    get_threshold: parses the performance data and stores it in a data structure.

    Args: perf_data -- performance data extracted from rrdtool

    Kwargs: None
    Returns:
           threshold_values (data structure containing the performance data for all data sources)
    Exceptions:
           None
    """
Example #17
    # 'end_date': datetime(2016, 1, 1),
}

Q_PUBLIC = "poller_queue"
Q_PRIVATE = "formatting_queue"
Q_OSPF = "poller_queue"
Q_PING = "poller_queue"

PARENT_DAG_NAME = "PROVISION_KPI"

#3-59/5 * * * *
# 3,8,13,18,23,28,33,38,43,48,53,58 * * * *
provision_kpi_dag = DAG(dag_id=PARENT_DAG_NAME,
                        default_args=default_args,
                        schedule_interval='*/5 * * * *')
redis_hook_7 = RedisHook(redis_conn_id="redis_hook_7")
redis_hook_2 = RedisHook(redis_conn_id="redis_hook_2")
technologies = eval(Variable.get('provision_kpi_technologies'))
machines = eval(Variable.get("system_config_o1"))
devices = eval(Variable.get('hostmk.dict.site_mapping'))
all_sites = []


def init_kpi():
    logging.info("TODO : Check All vars and Airflow ETL Environment here")
    redis_hook_7.flushall("*")
    logging.info("Flushed all in redis_hook_7 connection")


def get_previous_device_states(device_type):
    prev_state = eval(redis_hook_2.get("kpi_ul_prev_state_%s" % device_type))
Example #18
###############Utility functions for Provision################
import re
from airflow.models import Variable
from airflow.hooks import RedisHook
import time
import logging
import traceback
from airflow.hooks import MemcacheHook
import math

redis_hook_2 = RedisHook(
    redis_conn_id="redis_hook_2")  #DO NOT FLUSH THIS  DB !!!
redis_hook_7 = RedisHook(redis_conn_id="redis_hook_7")

rules = eval(Variable.get('rules'))
util_tech = eval(Variable.get('utilization_kpi_technologies'))

operators = eval(Variable.get('operators'))
service_state_type = ['warning', 'critical']
all_device_type_age_dict = {}
backhaul_inventory = {}
basestation_inventory = {}
for device in util_tech:
    try:
        all_device_type_age_dict[device] = eval(
            redis_hook_2.get("kpi_util_prev_state_%s" % (device)))
    except Exception:
        logging.error("No Prev states found")
        all_device_type_age_dict[device] = {}
backhaul_inventory = eval(redis_hook_2.get("backhaul_capacities"))
Example #19
    'email_on_retry': False,
    'retries': 0,
    'retry_delay': timedelta(minutes=1),
    'provide_context': True,
    'catchup': False,
    # 'queue': 'bash_queue',
    # 'pool': 'backfill',
    # 'priority_weight': 10,
    # 'end_date': datetime(2016, 1, 1),
}

##############################DAG CONFIG ENDS###############################################
aggregate_nw_tasks = {}
aggregate_sv_tasks = {}
logging.basicConfig(level=logging.ERROR)
redis_hook_4 = RedisHook(redis_conn_id="redis_hook_4")
rules = eval(Variable.get('rules'))
memc_con = MemcacheHook(memc_cnx_id='memc_cnx')
exclude_network_datasource = eval(Variable.get("exclude_network_datasource"))
databases = eval(Variable.get('databases'))
redis_hook_5 = RedisHook(redis_conn_id="redis_hook_5")
redis_hook_2 = RedisHook(redis_conn_id="redis_cnx_2")
redis_availablity_0 = RedisHook(redis_conn_id="redis_availablity_0")
all_devices_states = get_previous_device_states(redis_hook_5)
all_devices_states_rta = get_previous_device_states(redis_hook_5, "rta")
redis_hook_network_alarms = RedisHook(
    redis_conn_id="redis_hook_network_alarms")
event_rules = eval(Variable.get('event_rules'))
operators = eval(Variable.get('operators'))  #get operator Dict from
config = eval(Variable.get("system_config"))
debug_mode = eval(Variable.get("debug_mode"))
Example #20
    'owner': 'wireless',
    'depends_on_past': False,
    'start_date': datetime.now(),
    'email': ['*****@*****.**'],
    'email_on_failure': False,
    'email_on_retry': False,
    'retries': 1,
    'retry_delay': timedelta(minutes=1),
    'provide_context': True,
    'catchup': False,
    # 'queue': 'bash_queue',
    # 'pool': 'backfill',
    # 'priority_weight': 10,
    # 'end_date': datetime(2016, 1, 1),
}
redis_hook_4 = RedisHook(redis_conn_id="redis_hook_4")
TOPO_DB_COLUMNS = "device_name,service_name,data_source,machine_name,site_name,ip_address,mac_address,sector_id,connected_device_ip,connected_device_mac,sys_timestamp,check_timestamp,age,refer"
topo_query = "INSERT INTO performance_topology "
topo_query += "( device_name,service_name,data_source,machine_name,site_name,ip_address,mac_address,sector_id,connected_device_ip,connected_device_mac,sys_timestamp,check_timestamp,refer) VALUES (%(device_name)s,%(service_name)s,%(data_source)s,%(machine_name)s,%(site_name)s,%(ip_address)s,%(mac_address)s,%(sector_id)s,%(connected_device_ip)s,%(connected_device_mac)s,%(sys_timestamp)s,%(check_timestamp)s,%(refer)s)"

#################################Init Global Var ends###################################################################################


def topology_etl(parent_dag_name, child_dag_name, start_date,
                 schedule_interval):
    config = eval(Variable.get('system_config'))

    dag_subdag_topo = DAG(
        dag_id="%s.%s" % (parent_dag_name, child_dag_name),
        schedule_interval=schedule_interval,
        start_date=start_date,
Example #21
    'retries': 0,
    'retry_delay': timedelta(minutes=1),
    'catchup': False,
    'provide_context': True,
    # 'queue': 'bash_queue',
    # 'pool': 'backfill',
    # 'priority_weight': 10,
    # 'end_date': datetime(2016, 1, 1),
     
}
#redis_hook = RedisHook(redis_conn_id="redis_4")
PARENT_DAG_NAME = "GETSTATES"

prev_state_dag = DAG(dag_id=PARENT_DAG_NAME, default_args=default_args, schedule_interval='@once')
config = eval(Variable.get('system_config'))
redis_hook_5 = RedisHook(redis_conn_id="redis_hook_2")

memc_con = MemcacheHook(memc_cnx_id='memc_cnx')
vrfprv_memc_con = MemcacheHook(memc_cnx_id='vrfprv_memc_cnx')
pub_memc_con = MemcacheHook(memc_cnx_id='pub_memc_cnx')

def create_prev_state(**kwargs):
    
    
    #key = ospf1_slave_1_last_pl_info
    data = {}
    data_down = {}
    for conn_id in [1,2,3,4,5,6,7]:
        redis_hook = RedisHook(redis_conn_id="redis_prev_state_%s"%conn_id)
        if conn_id <= 5:
            for site in [1,2,3,4,5,6,7,8]:
                data_redis_down = redis_hook.hgetall("ospf%s_slave_%s_device_down"%(conn_id,site))