Code Example #1
File: rules.py Project: vipul-tm/DAGS-PROD
    def test(self):
        # in this example, put your test_plugin/test.html template at airflow/plugins/templates/test_plugin/test.html
        redis_hook_5 = RedisHook(redis_conn_id="redis_hook_5")

        pl_states = eval(redis_hook_5.get("all_devices_state"))
        rta_states = eval(redis_hook_5.get("all_devices_state_rta"))
        last_down_states = eval(redis_hook_5.get("all_devices_down_state"))

        pl_list = []
        rta_list = []
        down_list = []

        data_to_page = []
        attributes = []

        for device in pl_states:
            pl = {}
            pl['device_name'] = device
            pl['state'] = pl_states.get(device).get('state')
            pl['since'] = pl_states.get(device).get('since')
            pl_list.append(pl.copy())

        for device in rta_states:
            rta = {}
            rta['device_name'] = device
            rta['state'] = rta_states.get(device).get('state')
            rta['since'] = rta_states.get(device).get('since')
            rta_list.append(rta.copy())

        for device in last_down_states:
            down = {}
            down['device_name'] = device
            down['state'] = last_down_states.get(device).get('state')
            down['since'] = last_down_states.get(device).get('since')
            down_list.append(down.copy())

        return self.render("rules_plugin/prev_states.html",
                           attributes=attributes,
                           rta=rta_list,
                           pl=pl_list,
                           down=down_list)
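
This view (and the ones that follow) deserializes the strings stored in Redis with eval(). A minimal sketch of the same lookup done with ast.literal_eval instead, assuming, as the examples suggest, that the stored values are plain Python literals and that RedisHook.get returns the raw string or None when the key is missing:

import ast

def load_states(redis_hook, key):
    # Parse the stored string as a Python literal; unlike eval(),
    # ast.literal_eval refuses to evaluate arbitrary expressions.
    raw = redis_hook.get(key)
    return ast.literal_eval(raw) if raw else {}

# e.g. pl_states = load_states(redis_hook_5, "all_devices_state")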
Code Example #2
File: rules.py Project: vipul-tm/DAGS-PROD
    def test(self):
        # in this example, put your test_plugin/test.html template at airflow/plugins/templates/test_plugin/test.html
        redis_hook_6 = RedisHook(redis_conn_id="redis_hook_6")
        redis_hook_7 = RedisHook(redis_conn_id="redis_hook_7")
        redis_hook_10 = RedisHook(redis_conn_id="redis_hook_util_10")
        ulissue_keys = redis_hook_6.get_keys("aggregated_*")
        provis_keys = redis_hook_7.get_keys("aggregated_*")
        utilization_keys = redis_hook_10.get_keys("aggregated_*")
        data_to_page_ul = []
        data_to_page_provis = []
        data_to_page_util = []
        attributes = []
        combined_data = []

        for key in ulissue_keys:
            data = eval(redis_hook_6.get(key))
            for device_dict in data:
                data_to_page_ul.append(device_dict)
        print "Crossed 1"
        for key in provis_keys:
            data = eval(redis_hook_7.get(key))
            for device_dict in data:
                data_to_page_provis.append(device_dict)
        print "Crossed 2"
        for key in utilization_keys:
            data = eval(redis_hook_10.get(key))
            for device_dict in data:
                data_to_page_util.append(device_dict)
        print len(data_to_page_util)
        print "Crossed 3"

        data_to_page_ul.extend(data_to_page_provis)
        data_to_page_ul.extend(data_to_page_util)
        print "Total Records : %s" % len(data_to_page_ul)
        return self.render("rules_plugin/rules.html",
                           attributes=attributes,
                           data=data_to_page_ul)
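
The comments in these views come from Airflow's classic plugin example: each view here is a flask-admin BaseView method exposed through an AirflowPlugin, and self.render() resolves templates from the plugin templates folder mentioned in the comment above. A minimal registration sketch in that Airflow 1.x style, with illustrative class and plugin names (the real project defines its own):

from airflow.plugins_manager import AirflowPlugin
from flask_admin import BaseView, expose


class RulesView(BaseView):  # illustrative name
    @expose('/')
    def test(self):
        # ... build pl_list / rta_list / down_list as in Code Example #1 ...
        return self.render("rules_plugin/prev_states.html",
                           attributes=[], rta=[], pl=[], down=[])


class RulesPlugin(AirflowPlugin):  # illustrative name
    name = "rules_plugin"
    admin_views = [RulesView(category="Rules", name="Previous States")]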
Code Example #3
class MySqlLoaderOperator(BaseOperator):
	"""
	transfers memc or redis data to specific mysql table

	:param mysql_conn_id: reference to a specific mysql database
	:type mysql_conn_id: string
	:param sql: the sql code to be executed
	:type sql: Can receive a str representing a sql statement,
		a list of str (sql statements), or reference to a template file.
		Template reference are recognized by str ending in '.sql'
	"""


	@apply_defaults
	def __init__(self, query="", data="", redis_key="",
				 redis_conn_id="redis_hook_6", mysql_conn_id='mysql_uat',
				 *args, **kwargs):
		super(MySqlLoaderOperator, self).__init__(*args, **kwargs)
		self.mysql_conn_id = mysql_conn_id
		self.sql = query
		self.data = data
		self.redis_key = redis_key
		self.redis_hook = RedisHook(redis_conn_id=redis_conn_id)
		self.redis_conn_id = redis_conn_id

	def execute(self, context):
		hook = MySqlHook(mysql_conn_id=self.mysql_conn_id)
		if self.data == "" and self.redis_key != "" and self.redis_conn_id != "":
			# No rows were passed in directly: pull them from Redis instead.
			self.data = eval(self.redis_hook.get(self.redis_key))
			if len(self.data) <= 0:
				logging.error("Not inserting data as the provided key is empty")
				return 1
		conn = hook.get_conn()
		cursor = conn.cursor()
		try:
			cursor.executemany(self.sql, self.data)
			conn.commit()
		except Exception:
			logging.error("Failed to load data into MySQL")
			traceback.print_exc()
		finally:
			cursor.close()
			conn.close()
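
A hypothetical usage sketch for this operator inside a DAG; the task id, target table, and Redis key below are placeholders, and the SQL must use %(name)s placeholders that match the keys of the dicts handed to cursor.executemany:

load_kpi_rows = MySqlLoaderOperator(
    task_id="load_kpi_rows",  # hypothetical task id
    query=(
        "INSERT INTO kpi_example (device_name, state, since) "  # placeholder table
        "VALUES (%(device_name)s, %(state)s, %(since)s)"
    ),
    redis_key="aggregated_kpi_example",  # hypothetical key holding a list of dicts
    redis_conn_id="redis_hook_6",
    mysql_conn_id="mysql_uat",
    dag=dag,  # the surrounding DAG object
)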
Code Example #4
redis_hook_2 = RedisHook(
    redis_conn_id="redis_hook_2")  #DO NOT FLUSH THIS  DB !!!
redis_hook_7 = RedisHook(redis_conn_id="redis_hook_7")

rules = eval(Variable.get('rules'))
util_tech = eval(Variable.get('utilization_kpi_technologies'))

operators = eval(Variable.get('operators'))
service_state_type = ['warning', 'critical']
all_device_type_age_dict = {}
backhaul_inventory = {}
basestation_inventory = {}
for device in util_tech:
    try:
        all_device_type_age_dict[device] = eval(
            redis_hook_2.get("kpi_util_prev_state_%s" % (device)))
    except Exception:
        logging.error("No Prev states found")
        all_device_type_age_dict[device] = {}
backhaul_inventory = eval(redis_hook_2.get("backhaul_capacities"))
basestation_inventory = eval(redis_hook_2.get("basestation_capacities"))


#############################################SS UL ISSUE###############################################################################################
def calculate_wimax_utilization(wimax_util_data, wimax_pmp_bandwidth,
                                capacity):

    if wimax_pmp_bandwidth and isinstance(wimax_pmp_bandwidth, basestring):
        wimax_pmp_bandwidth = eval(wimax_pmp_bandwidth)
    if wimax_util_data and isinstance(wimax_util_data, basestring):
        wimax_util_data = eval(wimax_util_data)
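
The module-level configuration in these examples comes from eval()-ing Airflow Variables ('rules', 'operators', 'utilization_kpi_technologies', and so on). A sketch of how such a Variable could be seeded so those eval() calls can parse it; the technology map below is purely illustrative, not the project's real configuration:

from airflow.models import Variable

# Illustrative value only; the real project defines its own mapping.
utilization_kpi_technologies = {"wimax": "wimax_ss", "cambium": "cambium_ss"}

# Store the repr() of the structure so that eval(Variable.get(...))
# in the modules above reads the same dict back.
Variable.set("utilization_kpi_technologies", repr(utilization_kpi_technologies))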
Code Example #5
redis_hook_2 = RedisHook(
    redis_conn_id="redis_hook_2")  #DO NOT FLUSH THIS  DB !!!
redis_hook_7 = RedisHook(redis_conn_id="redis_hook_7")

rules = eval(Variable.get('rules'))
ul_tech = eval(Variable.get('provision_kpi_technologies'))
memc_con = MemcacheHook(memc_cnx_id='memc_cnx')
operators = eval(Variable.get('operators'))
service_state_type = ['warning', 'critical']
all_device_type_age_dict = {}

for techs_bs in ul_tech:
    techs_ss = ul_tech.get(techs_bs)
    all_device_type_age_dict[techs_ss] = eval(
        redis_hook_2.get("kpi_provis_prev_state_%s" % (techs_ss)))


#############################################SS UL ISSUE###############################################################################################
def calculate_wimax_ss_provision(wimax_ul_rssi, wimax_dl_rssi, wimax_dl_cinr,
                                 wimax_ss_ptx_invent):
    ss_state = "normal"
    try:
        if (wimax_ul_rssi is not None and wimax_dl_rssi is not None
                and wimax_ss_ptx_invent is not None
                and (int(wimax_ul_rssi) < -83 or
                     int(wimax_dl_rssi) < -83 and int(wimax_ss_ptx_invent) > 20)):
            ss_state = "los"
            state = 0
            state_string = "ok"
        elif (wimax_ul_rssi is not None and wimax_dl_rssi is not None
                and wimax_ss_ptx_invent is not None
                and (int(wimax_ul_rssi) < -83 or
                     int(wimax_dl_rssi) < -83 and int(wimax_ss_ptx_invent) <= 20)):
            ss_state = "need_alignment"
Code Example #6


redis_hook_2 = RedisHook(redis_conn_id="redis_hook_2") #DO NOT FLUSH THIS  DB !!!
redis_hook_6 = RedisHook(redis_conn_id="redis_hook_6")
redis_hook_4 = RedisHook(redis_conn_id="redis_hook_4")
rules = eval(Variable.get('rules'))
ul_tech = eval(Variable.get('ul_issue_kpi_technologies'))
memc_con = MemcacheHook(memc_cnx_id = 'memc_cnx')
operators = eval(Variable.get('operators'))
service_state_type = ['warning','critical']
all_device_type_age_dict ={}

for techs_bs in ul_tech:
	techs_ss = ul_tech.get(techs_bs)
	all_device_type_age_dict[techs_ss] = eval(redis_hook_2.get("kpi_ul_prev_state_%s"%(techs_ss)))
	all_device_type_age_dict[techs_bs] = eval(redis_hook_2.get("kpi_ul_prev_state_%s"%(techs_bs)))

#############################################SS UL ISSUE###############################################################################################
def calculate_wimax_ss_ul_issue(wimax_dl_intrf, wimax_ul_intrf):
	if wimax_ul_intrf and wimax_dl_intrf:
		if len(wimax_dl_intrf) == 2 and wimax_dl_intrf[0].lower() == 'critical' and wimax_dl_intrf[1].lower() == 'critical':
			ul_issue = 0
			state_string = "ok"
		elif len(wimax_ul_intrf) == 2 and wimax_ul_intrf[0].lower() in service_state_type and wimax_ul_intrf[1].lower() in service_state_type:
			ul_issue = 1
			state_string = "ok"
		elif len(wimax_ul_intrf) == 2 and len(wimax_dl_intrf) == 2:
			ul_issue = 0
Code Example #7
 values
  (%(machine_name)s, %(current_value)s, %(service_name)s, %(avg_value)s,
   %(max_value)s, %(age)s, %(min_value)s, %(site_name)s, %(data_source)s,
   %(critical_threshold)s, %(device_name)s, %(severity)s, %(sys_timestamp)s,
   %(ip_address)s, %(warning_threshold)s, %(check_timestamp)s, %(refer)s)
  ON DUPLICATE KEY UPDATE
   machine_name = VALUES(machine_name), current_value = VALUES(current_value),
   age = VALUES(age), site_name = VALUES(site_name),
   critical_threshold = VALUES(critical_threshold), severity = VALUES(severity),
   sys_timestamp = VALUES(sys_timestamp), ip_address = VALUES(ip_address),
   warning_threshold = VALUES(warning_threshold),
   check_timestamp = VALUES(check_timestamp), refer = VALUES(refer)
"""
ERROR_DICT = {
    404: 'Device not found yet',
    405: 'No SS Connected to BS-BS is not skipped'
}
ERROR_FOR_DEVICE_OMITTED = [404]
kpi_rules = eval(Variable.get("kpi_rules"))
DEBUG = False
sv_to_ds_mapping = {}
O7_CALC_Q = "calculation_q"
#O7_CALC_Q = "poller_queue"
down_and_unresponsive_devices = eval(
    redis_hook_static_5.get("current_down_devices_all"))


def process_utilization_kpi(
        parent_dag_name, child_dag_name, start_date, schedule_interval,
        celery_queue, ss_tech_sites, hostnames_ss_per_site, ss_name,
        utilization_attributes, config_sites
):  # here config_sites is the list of all sites in the system_config variable

    utilization_kpi_subdag_dag = DAG(
        dag_id="%s.%s" % (parent_dag_name, child_dag_name),
        schedule_interval=schedule_interval,
        start_date=start_date,
    )
    for service in utilization_attributes:
        sv_to_ds_mapping[service.get("service_name")] = {