def offline_restore_eng(config, mskengname, username, password, protocol, backup_dir):
    """Offline-restore a masking engine from its backups.

    Thin CLI wrapper: prints the banner, echoes arguments in debug/verbose
    mode, then delegates to aimasking.offline_restore_eng().
    """
    print_banner()
    if config.debug:
        globals.initialize()
        globals.debug = config.debug
    if config.verbose:
        print_debug('Verbose mode enabled')
    print_debug('mskengname = {0}'.format(mskengname))
    print_debug('username = {0}'.format(username))
    print_debug('protocol = {0}'.format(protocol))
    print_debug('backup_dir = {0}'.format(backup_dir))
    try:
        engine_client = aimasking(config,
                                  mskengname=mskengname,
                                  username=username,
                                  password=password,
                                  protocol=protocol,
                                  backup_dir=backup_dir)
        engine_client.offline_restore_eng()
    except Exception as err:
        # Same swallow-and-report contract as the other CLI wrappers.
        print("Error in MSK module")
        print(str(err))
    return
def get_api_response(self, ip_address, cookies, apicall, port=80):
    """GET an endpoint of the Delphix engine JSON API.

    :param ip_address: engine host name or IP
    :param cookies: session cookies from create_api_session / login_api_session
    :param apicall: path relative to /resources/json/delphix/
    :param port: TCP port; overridden to 443 when self.protocol == "https"
    :return: the 'result' element of the JSON response, or None on any error
    """
    protocol = self.protocol
    print_debug("protocol = {}, port ={}".format(protocol, port))
    if protocol == "https":
        port = 443
        print_debug("New protocol = {}, port ={}".format(protocol, port))
    api_url_base = '{}://{}:{}/resources/json/delphix/'.format(
        protocol, ip_address, port)
    headers = self.headers
    api_url = '{0}{1}'.format(api_url_base, apicall)
    try:
        # verify=False: self-signed engine certificates are expected here.
        response = requests.get(api_url,
                                cookies=cookies,
                                headers=headers,
                                verify=False)
        if response.status_code == 200:
            data = json.loads(response.content.decode('utf-8'))
            if data['status'] == "OK":
                return data['result']
            print_debug("Engine {} : Error fetching data".format(ip_address))
            return None
        print_debug("Engine {} : Error fetching data".format(ip_address))
        return None
    except Exception:
        # BUG FIX: was a bare `except:`, which also trapped SystemExit and
        # KeyboardInterrupt; narrowed to Exception.
        print_debug("Engine {} : Error fetching data".format(ip_address))
        return None
def del_engine(self):
    """Delete self.mskengname from the engine-pool CSV (self.enginelistfile).

    Rewrites the file (header + surviving rows) when the engine is found;
    otherwise prints a not-found message. Errors are logged via print_debug.
    """
    newenginelist = []
    try:
        found = 0
        if os.path.exists(self.enginelistfile):
            engine_list = self.create_dictobj(self.enginelistfile)
            for engine in engine_list:
                if self.mskengname != engine['ip_address']:
                    newenginelist.append(engine)
                else:
                    found = 1
                    print("Engine {} deleted from pool".format(
                        self.mskengname))
            if found == 1:
                # BUG FIX: use context managers so the file handle is closed
                # even if a write raises (the original leaked it on error).
                with open(self.enginelistfile, "w") as f:
                    f.write("{},{},{}\n".format("ip_address", "totalgb",
                                                "systemgb"))
                with open(self.enginelistfile, "a") as f:
                    for engine in newenginelist:
                        f.write("{},{},{}\n".format(engine['ip_address'],
                                                    engine['totalgb'],
                                                    engine['systemgb']))
            else:
                print("Engine {} does not exists in pool".format(
                    self.mskengname))
        else:
            print("File {} does not exists".format(self.enginelistfile))
    except Exception as e:
        print_debug(str(e))
        print_debug("Error deleting engine {} from file {}".format(
            self.mskengname, self.enginelistfile))
def add_engine(self):
    """Append engine self.mskengname (totalgb/systemgb) to the pool CSV.

    Creates the file with a header row if it does not exist yet. If the
    engine is already listed, prints guidance and terminates the process.
    """
    try:
        if os.path.exists(self.enginelistfile):
            engine_list = self.create_dictobj(self.enginelistfile)
            for engine in engine_list:
                if self.mskengname == engine['ip_address']:
                    print("Engine {} already exists in pool".format(
                        self.mskengname))
                    print(
                        "Please use upd-engine OR del-engine and add-engine module"
                    )
                    # NOTE(review): exit() kills the whole process; kept for
                    # backward compatibility with existing callers.
                    exit()
            # BUG FIX: context manager closes the handle even on write error.
            with open(self.enginelistfile, "a") as f:
                f.write("{},{},{}\n".format(self.mskengname, self.totalgb,
                                            self.systemgb))
        else:
            with open(self.enginelistfile, "w") as f:
                f.write("{},{},{}\n".format("ip_address", "totalgb",
                                            "systemgb"))
                f.write("{},{},{}\n".format(self.mskengname, self.totalgb,
                                            self.systemgb))
        # NOTE(review): printed for both the append and the create path;
        # the collapsed original's indentation was ambiguous here -- confirm.
        print("Engine {} successfully added to pool".format(self.mskengname))
    except Exception as e:
        print_debug(str(e))
        print_debug("Error adding engine {} to file {}".format(
            self.mskengname, self.enginelistfile))
def gen_cpu_file(self):
    """Generate self.enginecpulistfile (CSV: ip_address,cpu) by running the
    dxtoolkit dx_get_cpu command for every engine in the dxtools config.

    Engines that cannot be reached, or whose output is unrecognized, are
    recorded with a cpu value of 0.
    """
    with open(self.enginecpulistfile, "w") as f:
        f.write("{},{}\n".format("ip_address", "cpu"))
    dlpxconfig = loadveconfig()
    config_file_path = self.config_file_path
    dxtoolkit_path = self.dxtoolkit_path
    dlpxconfig.get_config(config_file_path)
    for engine in dlpxconfig.dlpx_engines:
        try:
            print_debug(
                "dxtoolkit_path: {}, config_file_path:{}, engine: {}".format(
                    dxtoolkit_path + '/dx_get_cpu', config_file_path, engine))
            out = subprocess.Popen([
                dxtoolkit_path + '/dx_get_cpu', '-d', engine, '-configfile',
                config_file_path
            ],
                                   stdout=subprocess.PIPE,
                                   stderr=subprocess.STDOUT)
            stdout, stderr = out.communicate()
            print_debug("stdout: {} ,stderr: {}".format(stdout, stderr))
            r1 = re.findall(r"Can't connect", stdout.decode("utf-8"))
            if not r1:
                rs = stdout.split()[0]
                rs = rs.decode("utf-8")
                print_debug("rs: {}".format(rs))
                # BUG FIX: original tested `rs == "OK:" or "CRITICAL:" or
                # "WARNING:"`, which is ALWAYS true (non-empty string literals
                # are truthy), so the failure branch was unreachable.
                if rs in ("OK:", "CRITICAL:", "WARNING:"):
                    # Last whitespace-separated token is the cpu reading.
                    cpuvalue = stdout.split()[-1:][0]
                    cpuvalue = cpuvalue.decode("utf-8")
                    with open(self.enginecpulistfile, "a") as f:
                        f.write("{},{}\n".format(engine, cpuvalue))
                    print_debug(
                        "Engine {} : pulled cpu data - OK".format(engine))
                else:
                    print("Engine {} : Unable to pull cpu data".format(
                        engine))
                    with open(self.enginecpulistfile, "a") as f:
                        f.write("{},{}\n".format(engine, "0"))
            else:
                print(
                    "Engine {} : Unable to connect and pull cpu data. Defualt 0"
                    .format(engine))
                with open(self.enginecpulistfile, "a") as f:
                    f.write("{},{}\n".format(engine, "0"))
        except Exception:
            # Narrowed from a bare except; best-effort per engine.
            print_debug(
                "Engine {} : Unable to pull cpu data".format(engine))
def pull_jobexeclist(self):
    """Rebuild the job-execution CSV (self.jobexeclistfile).

    Recreates the file with a header row, then queries every engine in the
    pool for its environments, masking jobs, and each job's executions; the
    latest execution (highest executionId) is written out only when it is
    still RUNNING.
    """
    csvdir = self.outputdir  # NOTE(review): unused local -- kept as-is
    try:
        # Reset the exec-list file to just the header row.
        if os.path.exists(self.jobexeclistfile):
            os.remove(self.jobexeclistfile)
            fe = open(self.jobexeclistfile, "w")
            fe.write("{},{},{},{},{},{},{},{}\n".format(
                "jobid", "jobname", "jobmaxmemory", "reservememory",
                "environmentid", "environmentname", "ip_address",
                "jobstatus"))
            fe.close()
        else:
            fe = open(self.jobexeclistfile, "w")
            fe.write("{},{},{},{},{},{},{},{}\n".format(
                "jobid", "jobname", "jobmaxmemory", "reservememory",
                "environmentid", "environmentname", "ip_address",
                "jobstatus"))
            fe.close()
    except:
        # NOTE(review): print_debug is called with two positional args here;
        # confirm its signature accepts that.
        print_debug("Error while deleting file ", self.jobexeclistfile)
    engine_list = self.create_dictobj(self.enginelistfile)
    for engine in engine_list:
        engine_name = engine['ip_address']
        apikey = self.get_auth_key(engine_name)
        if apikey is not None:
            apicall = "environments?page_number=1"
            envlist_response = self.get_api_response(
                engine_name, apikey, apicall)
            for envname in envlist_response['responseList']:
                jobapicall = "masking-jobs?page_number=1&environment_id={}".format(
                    envname['environmentId'])
                joblist_response = self.get_api_response(
                    engine_name, apikey, jobapicall)
                joblist_responselist = joblist_response['responseList']
                for joblist in joblist_responselist:
                    fe = open(self.jobexeclistfile, "a")
                    jobexecapicall = "executions?job_id={}&page_number=1".format(
                        joblist['maskingJobId'])
                    jobexeclist_response = self.get_api_response(
                        engine_name, apikey, jobexecapicall)
                    jobexeclist_responselist = jobexeclist_response[
                        'responseList']
                    if jobexeclist_responselist != []:
                        # Latest execution = highest executionId.
                        latestexecid = max(
                            jobexeclist_responselist,
                            key=lambda ev: ev['executionId'])
                        if latestexecid['status'] == "RUNNING":
                            fe.write("{},{},{},{},{},{},{},{}\n".format(
                                joblist['maskingJobId'], joblist['jobName'],
                                joblist['maxMemory'], '0',
                                envname['environmentId'],
                                envname['environmentName'], engine_name,
                                latestexecid['status']))
                    fe.close()
    print_debug("File {} successfully generated".format(
        self.jobexeclistfile))
def post_api_response(self, ip_address, api_token, apicall, body, port=80):
    """POST a JSON body to the masking API and return the parsed response.

    Returns the decoded JSON on HTTP 200, otherwise logs the response body
    via print_debug and returns None.
    """
    request_headers = {
        'Content-Type': 'application/json',
        'Authorization': '{0}'.format(api_token)
    }
    base = 'http://{}:{}/masking/api/'.format(ip_address, port)
    target_url = '{0}{1}'.format(base, apicall)
    response = requests.post(target_url, headers=request_headers, json=body)
    if response.status_code != 200:
        print_debug(response.content.decode('utf-8'))
        return None
    return json.loads(response.content.decode('utf-8'))
def test_connectors(config, mskengname, username, password, protocol):
    """Test all connectors on the given masking engine.

    BUG FIX: the original docstring said "This module will cleanup engine" --
    copy-pasted from the cleanup wrapper; the function actually delegates to
    aimasking.test_all_connectors().
    """
    print_banner()
    if config.debug:
        globals.initialize()
        globals.debug = config.debug
    if config.verbose:
        print_debug('Verbose mode enabled')
    print_debug('mskengname = {0}'.format(mskengname))
    print_debug('username = {0}'.format(username))
    print_debug('protocol = {0}'.format(protocol))
    try:
        mskai = aimasking(config,
                          mskengname=mskengname,
                          username=username,
                          password=password,
                          protocol=protocol)
        mskai.test_all_connectors()
    except Exception as e:
        print("Error in MSK module")
        print(str(e))
    return
def run_job(self):
    """Entry point for a job run: echo the run parameters in debug mode,
    then load all engine/job/execution CSV state via read_configs().
    """
    if self.config.debug:
        print_debug("Parameter List:")
        print_debug(" jobid = {}".format(self.jobid))
        print_debug(" envname = {}".format(self.envname))
        print_debug(" run = {}".format(self.run))
        # print_debug(" password= {}".format(self.password))
    # BUG FIX: removed unused local `csvdir = self.outputdir`.
    self.read_configs()
def list_engine(self):
    """Print a formatted table of every engine in the pool CSV
    (name, total memory GB, system memory GB)."""
    try:
        if os.path.exists(self.enginelistfile):
            engine_list = self.create_dictobj(self.enginelistfile)
            print('{0:>1}{1:<35}{2:>20}{3:>20}'.format(
                " ", "EngineName", "Total Memory(GB)", "System Memory(GB)"))
            for engine in engine_list:
                print('{0:>1}{1:<35}{2:>20}{3:>20}'.format(
                    " ", engine['ip_address'], engine['totalgb'],
                    engine['systemgb']))
            print(" ")
        else:
            # BUG FIX: original called .format(self.mskengname) on a literal
            # with no placeholder -- a silent no-op; removed. Also removed
            # the unused local `csvdir`.
            print("No Engine found in pool")
    except Exception as e:
        print_debug(str(e))
        print_debug("Not able to open file {}".format(self.enginelistfile))
def add_engine(config, mskengname, totalgb, systemgb):
    """ This module will add engine to pool"""
    print_banner()
    if config.debug:
        globals.initialize()
        globals.debug = config.debug
    if config.verbose:
        print_debug('Verbose mode enabled')
    globals.arguments['--debug'] = config.debug
    globals.arguments['--config'] = './dxtools.conf'
    # CONSISTENCY FIX: every sibling CLI wrapper guards the aimasking call
    # with try/except; this one let exceptions escape to the CLI.
    try:
        mskai = aimasking(config,
                          mskengname=mskengname,
                          totalgb=totalgb,
                          systemgb=systemgb)
        mskai.add_engine()
    except Exception as e:
        print("Error in MSK module")
        print(str(e))
    return
def __init__(self, config, **kwargs):
    """Initialise the masking helper: resolve script/output paths, pick up
    the shared CSV file locations from the project `globals` module, create
    empty DataFrames (filled later by read_configs), and capture optional
    run parameters from keyword arguments.

    :param config: parsed CLI config object (exposes .debug at least)
    :param kwargs: any of jobid, envname, run, password, mskengname,
        totalgb, systemgb -- stored as attributes only when supplied
    """
    self.scriptname = os.path.basename(__file__)
    # _MEIPASS exists when running from a PyInstaller bundle; otherwise
    # fall back to the real script directory.
    self.scriptdir = getattr(sys, '_MEIPASS',
                             os.path.dirname(os.path.abspath(__file__)))
    # self.scriptdir = os.path.dirname(os.path.abspath(__file__))
    self.enginelistfile = globals.enginelistfile
    self.joblistfile = globals.joblistfile
    self.jobexeclistfile = globals.jobexeclistfile
    self.qualifiedengineslistfile = globals.qualifiedengineslistfile
    self.enginecpulistfile = globals.enginecpulistfile
    self.config = config
    # Empty placeholders; populated by read_configs().
    self.df_enginelist = pd.DataFrame()
    self.df_joblist = pd.DataFrame()
    self.df_jobexeclist = pd.DataFrame()
    self.df_joblistunq = pd.DataFrame()
    self.df_enginecpulist = pd.DataFrame()
    # Optional parameters: attribute exists only if the kwarg was passed.
    if "jobid" in kwargs.keys():
        self.jobid = kwargs['jobid']
    if "envname" in kwargs.keys():
        self.envname = kwargs['envname']
    if "run" in kwargs.keys():
        self.run = kwargs['run']
    if "password" in kwargs.keys():
        self.password = kwargs['password']
    if "mskengname" in kwargs.keys():
        self.mskengname = kwargs['mskengname']
    if "totalgb" in kwargs.keys():
        self.totalgb = kwargs['totalgb']
    if "systemgb" in kwargs.keys():
        self.systemgb = kwargs['systemgb']
    self.outputdir = os.path.join(self.scriptdir, 'output')
    self.outputfilename = 'output.txt'
    self.report_output = os.path.join(self.scriptdir, 'output',
                                      self.outputfilename)
    # Ensure the output directory exists (os.stat raises when missing).
    try:
        os.stat(self.outputdir)
    except:
        os.mkdir(self.outputdir)
        if self.config.debug:
            print_debug("Created directory {}".format(self.outputdir))
def get_auth_key(self, ip_address, port=80):
    """Log in to the masking API and return its Authorization token.

    :param ip_address: masking engine host name or IP
    :param port: masking API port (default 80)
    :return: the Authorization header value, or None on any failure
    """
    api_url_base = 'http://{}:{}/masking/api/'.format(ip_address, port)
    headers = {'Content-Type': 'application/json'}
    api_url = '{0}login'.format(api_url_base)
    # NOTE(review): credentials are literal masked placeholders in this
    # source; presumably real values are injected elsewhere -- confirm.
    credentials = {"username": "******", "password": "******"}
    try:
        response = requests.post(api_url, headers=headers, json=credentials)
        if response.status_code == 200:
            data = json.loads(response.content.decode('utf-8'))
            return data['Authorization']
        print_debug("Error generating key {}".format(ip_address))
        return None
    except Exception:
        # BUG FIX: narrowed from a bare `except:` that also swallowed
        # SystemExit/KeyboardInterrupt.
        print_debug("Error connecting engine {}".format(ip_address))
        return None
def login_api_session(self, ip_address, cookies, apicall, payload, port=80):
    """POST a LoginRequest to an existing Delphix API session.

    :param cookies: session cookies from create_api_session
    :param apicall: login endpoint path relative to /resources/json/delphix/
    :param payload: LoginRequest body (type/username/password)
    :return: refreshed {'JSESSIONID': ...} cookie dict, or None on failure
    """
    protocol = self.protocol
    print_debug("protocol = {}, port ={}".format(protocol, port))
    if protocol == "https":
        port = 443
        print_debug("New protocol = {}, port ={}".format(protocol, port))
    api_url_base = '{}://{}:{}/resources/json/delphix/'.format(
        protocol, ip_address, port)
    headers = self.headers
    api_url = '{0}{1}'.format(api_url_base, apicall)
    try:
        response = requests.post(api_url,
                                 cookies=cookies,
                                 headers=headers,
                                 json=payload,
                                 verify=False)
        if response.status_code == 200:
            data = json.loads(response.content.decode('utf-8'))
            if data['status'] == "OK":
                # Login rotates the session cookie; return the new one.
                return {'JSESSIONID': response.cookies['JSESSIONID']}
            print_debug("Engine {} : Error logging engine".format(ip_address))
            return None
        print_debug("Engine {} : Error logging engine".format(ip_address))
        return None
    except Exception:
        # BUG FIX: narrowed from a bare `except:`.
        print_debug("Engine {} : Error logging engine".format(ip_address))
        return None
def __init__(self, config, **kwargs):
    """Set up the virtualization helper: shared CSV paths, optional
    runtime options from kwargs, default JSON headers, and make sure the
    output directory exists."""
    # self.scriptname = os.path.basename(__file__)
    # self.scriptdir = os.path.dirname(os.path.abspath(__file__))
    self.enginelistfile = globals.enginelistfile
    self.enginecpulistfile = globals.enginecpulistfile
    self.config = config
    # Only set attributes for options the caller actually supplied.
    for option in ("config_file_path", "outputdir", "protocol",
                   "dxtoolkit_path"):
        if option in kwargs:
            setattr(self, option, kwargs[option])
    self.headers = {'Content-Type': 'application/json'}
    try:
        os.stat(self.outputdir)
    except:
        os.mkdir(self.outputdir)
        if self.config.debug:
            print_debug("Created directory {}".format(self.outputdir))
def group_job_mem_usage(self, key, sumcol, mydictname):
    """Aggregate `sumcol` over RUNNING jobs, grouped by `key`.

    :param key: dict key to group by (e.g. 'ip_address')
    :param sumcol: dict key whose value is summed (string, cast to int)
    :param mydictname: iterable of job dicts; only rows whose
        jobstatus == 'RUNNING' are counted
    :return: list of {key: group_value, 'totalusedmemory': sum} dicts,
        or None when nothing aggregated or on error
    """
    try:
        totals = Counter()
        for row in mydictname:
            if row['jobstatus'] == 'RUNNING':
                totals[row[key]] += int(row[sumcol])
        aggregate_list = [{
            key: group_value,
            'totalusedmemory': used
        } for group_value, used in totals.items()]
        # BUG FIX: the original also tested `aggregate_list is None`, but a
        # list comprehension can never produce None -- branch was unreachable.
        if not aggregate_list:
            print_debug("Returned [] for aggregate job usage data")
            return None
        return aggregate_list
    except Exception as e:
        print_debug("ERROR : Unable to aggregate job usage data")
        print_debug(e)
        return None
def pull_jobpoolexeclist(config, username, password, protocol):
    """ This module will pull job pool exec list from all engines"""
    print_banner()
    if config.debug:
        globals.initialize()
        globals.debug = config.debug
    if config.verbose:
        print_debug('Verbose mode enabled')
    print_debug('username = {0}'.format(username))
    print_debug('protocol = {0}'.format(protocol))
    try:
        pool_client = aimasking(config,
                                username=username,
                                password=password,
                                protocol=protocol)
        pool_client.pull_jobpoolexeclist()
    except Exception as err:
        # Report-and-continue, matching the other CLI wrappers.
        print("Error in MSK module")
        print(str(err))
    return
def create_api_session(self, ip_address, port=80):
    """Open an APISession on a Delphix engine.

    :param ip_address: engine host name or IP
    :param port: TCP port; overridden to 443 when self.protocol == "https"
    :return: {'JSESSIONID': ...} cookie dict for follow-up calls, or None
    """
    protocol = self.protocol
    print_debug("protocol = {}, port ={}".format(protocol, port))
    if protocol == "https":
        port = 443
        print_debug("New protocol = {}, port ={}".format(protocol, port))
    # Pin the API version this client was written against.
    apiversion = {
        'type': 'APISession',
        'version': {
            'type': 'APIVersion',
            "major": 1,
            "minor": 9,
            "micro": 3
        }
    }
    api_url_base = '{}://{}:{}/resources/json/delphix/'.format(
        protocol, ip_address, port)
    print_debug("api_url_base = {}".format(api_url_base))
    headers = self.headers
    api_url = '{0}session'.format(api_url_base)
    try:
        response = requests.post(api_url,
                                 headers=headers,
                                 json=apiversion,
                                 verify=False)
        if response.status_code == 200:
            data = json.loads(response.content.decode('utf-8'))
            if data['status'] == "OK":
                return {'JSESSIONID': response.cookies['JSESSIONID']}
            print_debug(
                "Engine {} : Error connecting engine".format(ip_address))
            return None
        print_debug("Engine {} : Error connecting engine".format(ip_address))
        return None
    except Exception:
        # BUG FIX: narrowed from a bare `except:`.
        print_debug("Engine {} : Error connecting engine".format(ip_address))
        return None
def list_eng_usage(config, username, password, protocol, mock, dxtoolkit_path):
    """ This module will find green engines"""
    print_banner()
    if config.debug:
        globals.initialize()
        globals.debug = config.debug
    if config.verbose:
        print_debug('Verbose mode enabled')
    print_debug('mock = {0}'.format(mock))
    print_debug('username = {0}'.format(username))
    print_debug('protocol = {0}'.format(protocol))
    print_debug('dxtoolkit_path = {0}'.format(dxtoolkit_path))
    # Seed the docopt-style argument map consumed by the dxtoolkit helpers.
    globals.arguments['--debug'] = config.debug
    globals.arguments['--config'] = './dxtools.conf'
    globals.arguments['--all'] = True
    globals.arguments['--engine'] = None
    globals.arguments['--logdir'] = './dx_skel.log'
    globals.arguments['--parallel'] = None
    globals.arguments['--poll'] = '10'
    globals.arguments['--version'] = False
    globals.arguments['--single_thread'] = True
    globals.arguments['--dxtoolkit_path'] = dxtoolkit_path
    # Step 1: refresh the running-execution list (skipped in mock mode).
    try:
        mskai = aimasking(config,
                          mock=mock,
                          username=username,
                          password=password,
                          protocol=protocol)
        if not mock:
            mskai.pull_jobexeclist()
    except Exception as e:
        print("Error in MSK module")
        print(str(e))
        return
    # Step 2: capture CPU usage for every engine via dxtoolkit.
    try:
        print_debug(" ")
        print_debug("Capture CPU usage data...")
        scriptdir = os.path.dirname(os.path.abspath(__file__))
        outputdir = os.path.join(scriptdir, 'output')
        print_debug("dxtoolkit_path: {}".format(dxtoolkit_path))
        aive = virtualization(config,
                              config_file_path='./dxtools.conf',
                              scriptdir=scriptdir,
                              outputdir=outputdir,
                              protocol=protocol,
                              dxtoolkit_path=dxtoolkit_path)
        print_debug("dxtoolkit_path: {}".format(dxtoolkit_path))
        aive.gen_cpu_file()
        print_debug("Capture CPU usage data : done")
        print_debug(" ")
    except Exception as e:
        # BUG FIX: was a bare `except:` that hid the failure cause entirely.
        print("Error in VE module")
        print_debug(str(e))
        return
    # Step 3: print the aggregated usage report.
    try:
        mskai = aimasking(config,
                          mock=mock,
                          username=username,
                          password=password,
                          protocol=protocol)
        mskai.list_eng_usage()
    except Exception as e:
        print("Error in MSK module")
        print(str(e))
    return
def get_cpu_raw_data(self, engine):
    """Pull ~5 minutes of CPU analytics from one engine via the Delphix API
    and append the averaged utilisation to self.enginecpulistfile.

    :param engine: dict with 'ip_address', 'username', 'password' keys
    """
    # engine = {'ip_address' : 'ajaydlpx6pri.dcenter.delphix.com' , 'username' : 'admin' , 'password' : 'delphix'}
    cookies = self.create_api_session(engine['ip_address'], port=80)
    if cookies is not None:
        print_debug("Engine {} : Session created".format(
            engine['ip_address']))
        apicall = "login"
        payload = {
            "type": "LoginRequest",
            "username": engine['username'],
            "password": engine['password']
        }
        logincookies = self.login_api_session(engine['ip_address'], cookies,
                                              apicall, payload, port=80)
        if logincookies is not None:
            print_debug("Engine {} : Login Successful".format(
                engine['ip_address']))
            apicall = "analytics"
            analytics_list = self.get_api_response(engine['ip_address'],
                                                   logincookies, apicall,
                                                   port=80)
            if analytics_list is not None:
                cpu_data_list = []
                for slice in analytics_list:
                    # Only the default CPU statistic slice is of interest.
                    if slice['name'] == 'default.cpu':
                        five_minute = timedelta(minutes=5)
                        end_date = datetime.utcnow()
                        # end_date = datetime.today()
                        start_date = end_date - five_minute
                        # Engine API wants ISO-8601 with a trailing .000Z.
                        start_date_isostr = "{}T{}.000Z".format(
                            start_date.strftime('%Y-%m-%d'),
                            start_date.strftime('%H:%M:%S'))
                        end_date_isostr = "{}T{}.000Z".format(
                            end_date.strftime('%Y-%m-%d'),
                            end_date.strftime('%H:%M:%S'))
                        print_debug(
                            'Engine {} : Parameters ({}, {}, {}, {})'.format(
                                engine['ip_address'], slice['reference'],
                                "resolution=1", start_date_isostr,
                                end_date_isostr))
                        cpu_analytics_list = []
                        try:
                            apicall = "analytics/{}/getData?&resolution={}&numberofDatapoints={}&startTime={}&endTime={}".format(
                                slice['reference'], "1", "10000",
                                start_date_isostr, end_date_isostr)
                            cpu_analytics_data = self.get_api_response(
                                engine['ip_address'], logincookies, apicall,
                                port=80)
                            if cpu_analytics_data == []:
                                print_debug(
                                    "Engine {} : No data found for engine".
                                    format(engine['ip_address']))
                            else:
                                for row in cpu_analytics_data[
                                        'datapointStreams'][0]['datapoints']:
                                    # Strip sub-seconds, 'T' -> space.
                                    ts = row['timestamp'].split(
                                        ".")[0].replace("T", " ")
                                    idle = 0 if row['idle'] <= 0 else row[
                                        'idle']
                                    user = 0 if row['user'] <= 0 else row[
                                        'user']
                                    # NOTE(review): kernel floors at 1 while
                                    # idle/user floor at 0 -- presumably to
                                    # avoid a zero total; confirm intent.
                                    kernel = 1 if row[
                                        'kernel'] <= 0 else row['kernel']
                                    ttl_cpu = idle + kernel + user
                                    util = 0 if (ttl_cpu == 0) else ((
                                        (user + kernel) / (ttl_cpu)) * 100)
                                    cpu_data_dict = {
                                        "ts": ts,
                                        "cpu": float(util)
                                    }
                                    cpu_data_list.append(cpu_data_dict)
                                # print_debug(round(mean(k['cpu'] for k in cpu_data_list),2))
                                cpu_usage = round(
                                    mean(k['cpu'] for k in cpu_data_list), 2)
                                f = open(self.enginecpulistfile, "a")
                                f.write("{},{}\n".format(
                                    engine['ip_address'], cpu_usage))
                                f.close()
                                print_debug(
                                    "+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++"
                                )
                        except Exception as e:
                            print_debug(
                                "Engine {} : Unable to pull cpu_analytics_data"
                                .format(engine['ip_address']))
                            return
            else:
                print_debug("Engine {} : Unable to pull data".format(
                    engine['ip_address']))
        else:
            print_debug("Engine {} : Unable to login".format(
                engine['ip_address']))
def run_job(config, jobname, envname, run, mock, username, password, protocol,
            dxtoolkit_path):
    """ This module will execute masking job on best candidate engine"""
    print_banner()
    if config.debug:
        globals.initialize()
        globals.debug = config.debug
    if config.verbose:
        print_debug('Verbose mode enabled')
    print_debug('jobname = {0}'.format(jobname))
    print_debug('envname = {0}'.format(envname))
    print_debug('run = {0}'.format(run))
    print_debug('mock = {0}'.format(mock))
    print_debug('username = {0}'.format(username))
    print_debug('protocol = {0}'.format(protocol))
    print_debug('dxtoolkit_path = {0}'.format(dxtoolkit_path))
    # Seed the docopt-style argument map consumed by the dxtoolkit helpers.
    globals.arguments['--debug'] = config.debug
    globals.arguments['--config'] = './dxtools.conf'
    globals.arguments['--all'] = True
    globals.arguments['--engine'] = None
    globals.arguments['--logdir'] = './dx_skel.log'
    globals.arguments['--parallel'] = None
    globals.arguments['--poll'] = '10'
    globals.arguments['--version'] = False
    globals.arguments['--single_thread'] = True
    globals.arguments['--dxtoolkit_path'] = dxtoolkit_path
    # Step 1: refresh the execution list and bail out early when the same
    # job/env combination is already running somewhere in the pool.
    try:
        mskai = aimasking(config,
                          jobname=jobname,
                          envname=envname,
                          run=run,
                          mock=mock,
                          username=username,
                          password=password,
                          protocol=protocol)
        if not mock:
            mskai.pull_jobexeclist()
            chk_status = mskai.chk_job_running()
            #print("chk_status={}".format(chk_status))
            if chk_status != 0:
                # NOTE(review): message literal reconstructed from a
                # line-wrapped source -- confirm exact wording.
                print(
                    " Job {} on Env {} is already running on engine {}. Please retry later"
                    .format(jobname, envname, chk_status))
                return
    except Exception as e:
        print("Error in MSK module")
        print(str(e))
        return
    # Step 2: capture current CPU usage of every engine via dxtoolkit.
    try:
        print_debug(" ")
        print_debug(" ")
        print_debug(" ")
        print_debug(" ")
        print_debug("Capture CPU usage data...")
        scriptdir = os.path.dirname(os.path.abspath(__file__))
        outputdir = os.path.join(scriptdir, 'output')
        print_debug("dxtoolkit_path: {}".format(dxtoolkit_path))
        aive = virtualization(config,
                              config_file_path='./dxtools.conf',
                              scriptdir=scriptdir,
                              outputdir=outputdir,
                              protocol=protocol,
                              dxtoolkit_path=dxtoolkit_path)
        print_debug("dxtoolkit_path: {}".format(dxtoolkit_path))
        aive.gen_cpu_file()
        print_debug("Capture CPU usage data : done")
        print_debug(" ")
        print_debug(" ")
        print_debug(" ")
        print_debug(" ")
    except:
        print("Error in VE module")
        return
    # Step 3: pick the best engine and run the job.
    try:
        mskai = aimasking(config,
                          jobname=jobname,
                          envname=envname,
                          run=run,
                          mock=mock,
                          username=username,
                          password=password,
                          protocol=protocol)
        mskai.run_job()
    except Exception as e:
        print("Error in MSK module")
        print(str(e))
    return
def sync_globalobj(config, srcmskengname, tgtmskengname, globalobjsync,
                   username, password, protocol):
    """ This module will sync global objects between 2 engines"""
    print_banner()
    if config.debug:
        globals.initialize()
        globals.debug = config.debug
    if config.verbose:
        print_debug('Verbose mode enabled')
    print_debug('srcmskengname = {0}'.format(srcmskengname))
    print_debug('tgtmskengname = {0}'.format(tgtmskengname))
    print_debug('globalobjsync = {0}'.format(globalobjsync))
    print_debug('username = {0}'.format(username))
    print_debug('protocol = {0}'.format(protocol))
    try:
        sync_client = aimasking(config,
                                srcmskengname=srcmskengname,
                                tgtmskengname=tgtmskengname,
                                globalobjsync=globalobjsync,
                                username=username,
                                password=password,
                                protocol=protocol)
        sync_client.sync_globalobj()
    except Exception as err:
        # Report-and-continue, matching the other CLI wrappers.
        print("Error in MSK module")
        print(str(err))
    return
def pull_joblist(self):
    """Regenerate the job-list CSV (self.joblistfile).

    When self.mskengname == 'all': recreate the file and pull jobs from
    every engine in the pool. Otherwise: strip the named engine's existing
    rows from the file, then re-pull just that engine's jobs.
    """
    csvdir = self.outputdir  # NOTE(review): unused local -- kept as-is
    if self.mskengname == 'all':
        try:
            # Reset the job-list file to just the header row.
            if os.path.exists(self.joblistfile):
                os.remove(self.joblistfile)
                f = open(self.joblistfile, "w")
                f.write("{},{},{},{},{},{},{}\n".format(
                    "jobid", "jobname", "jobmaxmemory", "reservememory",
                    "environmentid", "environmentname", "ip_address"))
                f.close()
            else:
                f = open(self.joblistfile, "w")
                f.write("{},{},{},{},{},{},{}\n".format(
                    "jobid", "jobname", "jobmaxmemory", "reservememory",
                    "environmentid", "environmentname", "ip_address"))
                f.close()
        except:
            print_debug("Error deleting file ", self.joblistfile)
        engine_list = self.create_dictobj(self.enginelistfile)
        for engine in engine_list:
            engine_name = engine['ip_address']
            apikey = self.get_auth_key(engine_name)
            if apikey is not None:
                apicall = "environments?page_number=1"
                envlist_response = self.get_api_response(
                    engine_name, apikey, apicall)
                f = open(self.joblistfile, "a")
                for envname in envlist_response['responseList']:
                    jobapicall = "masking-jobs?page_number=1&environment_id={}".format(
                        envname['environmentId'])
                    joblist_response = self.get_api_response(
                        engine_name, apikey, jobapicall)
                    joblist_responselist = joblist_response['responseList']
                    for joblist in joblist_responselist:
                        # reservememory defaults to '0' at pull time.
                        f.write("{},{},{},{},{},{},{}\n".format(
                            joblist['maskingJobId'], joblist['jobName'],
                            joblist['maxMemory'], '0',
                            envname['environmentId'],
                            envname['environmentName'], engine_name))
                f.close()
                print("File {} successfully updated with jobs from {}".
                      format(self.joblistfile, engine_name))
    else:
        # Delete existing jobs for particular engine
        newjoblist = []
        try:
            i = 0  # flag: 1 when at least one row for this engine was found
            if os.path.exists(self.joblistfile):
                job_list = self.create_dictobj(self.joblistfile)
                for job in job_list:
                    if self.mskengname != job['ip_address']:
                        newjoblist.append(job)
                    else:
                        i = 1
                        print(
                            "Existing Job {} deleted for engine {}".format(
                                job['jobname'], self.mskengname))
                if i == 1:
                    # Rewrite header then the surviving rows.
                    try:
                        if os.path.exists(self.joblistfile):
                            os.remove(self.joblistfile)
                            f = open(self.joblistfile, "w")
                            f.write("{},{},{},{},{},{},{}\n".format(
                                "jobid", "jobname", "jobmaxmemory",
                                "reservememory", "environmentid",
                                "environmentname", "ip_address"))
                            f.close()
                    except:
                        print_debug("Error deleting file ",
                                    self.joblistfile)
                    f = open(self.joblistfile, "a")
                    for job in newjoblist:
                        f.write("{},{},{},{},{},{},{}\n".format(
                            job['jobid'], job['jobname'],
                            job['jobmaxmemory'], job['reservememory'],
                            job['environmentid'], job['environmentname'],
                            job['ip_address']))
                    f.close()
                else:
                    print("No existing jobs found for Engine {} in pool".
                          format(self.mskengname))
            else:
                # NOTE(review): message literal reconstructed from a
                # line-wrapped source -- confirm exact wording.
                print("File {} does not exists. Creating it".format(
                    self.joblistfile))
                f = open(self.joblistfile, "w")
                f.write("{},{},{},{},{},{},{}\n".format(
                    "jobid", "jobname", "jobmaxmemory", "reservememory",
                    "environmentid", "environmentname", "ip_address"))
                f.close()
        except Exception as e:
            print_debug(str(e))
            print_debug(
                "Error deleting jobs for engine {} in file {}".format(
                    self.mskengname, self.joblistfile))
        # Pull New List
        engine_name = self.mskengname
        apikey = self.get_auth_key(engine_name)
        if apikey is not None:
            apicall = "environments?page_number=1"
            envlist_response = self.get_api_response(
                engine_name, apikey, apicall)
            f = open(self.joblistfile, "a")
            for envname in envlist_response['responseList']:
                jobapicall = "masking-jobs?page_number=1&environment_id={}".format(
                    envname['environmentId'])
                joblist_response = self.get_api_response(
                    engine_name, apikey, jobapicall)
                joblist_responselist = joblist_response['responseList']
                for joblist in joblist_responselist:
                    f.write("{},{},{},{},{},{},{}\n".format(
                        joblist['maskingJobId'], joblist['jobName'],
                        joblist['maxMemory'], '0',
                        envname['environmentId'],
                        envname['environmentName'], engine_name))
            f.close()
            print(
                "Job list for engine {} successfully generated in file {}".
                format(self.mskengname, self.joblistfile))
def read_configs(self) -> None:
    """Pick the best-fit masking engine for the requested job and optionally run it.

    Reads the engine/job/execution/CPU CSV files, prints the job's memory
    requirement, the current per-engine memory/CPU usage, then shortlists
    the engines that host this job (``self.jobid`` in ``self.envname``) and
    computes each one's headroom:
    ``totalmb - systemmb - memory of RUNNING jobs - jobmaxmemory - reservememory``.
    The engine with the most headroom wins; when ``self.run`` is truthy the
    job is submitted to it via the masking API (``self.exec_job``).

    Instance state read: enginelistfile, joblistfile, jobexeclistfile,
    enginecpulistfile, envname, jobid, run, config (verbose/debug flags).
    Side effects: sets self.df_enginelist / df_enginecpulist / df_joblist /
    df_jobexeclist / df_joblistunq; prints reports; may start a masking job.
    Raises IndexError (via ``.values[0]``) when jobid/envname is not present
    in the job list.
    """
    # on windows
    # os.system('color')
    ####self.pull_jobexeclist()

    # Raw CSV rows as lists of dicts.  Only enginecpu_list is still used
    # below; job_list / jobexec_list are superseded by the DataFrames.
    engine_list = self.create_dictobj(self.enginelistfile)
    job_list = self.create_dictobj(self.joblistfile)
    jobexec_list = self.create_dictobj(self.jobexeclistfile)
    enginecpu_list = self.create_dictobj(self.enginecpulistfile)

    # Engine capacity table, converted GB -> MB and columns renamed to match.
    self.df_enginelist = pd.read_csv(self.enginelistfile)
    self.df_enginelist['totalgb'] = self.df_enginelist['totalgb'] * 1024
    self.df_enginelist['systemgb'] = self.df_enginelist['systemgb'] * 1024
    self.df_enginelist.rename(columns={
        'totalgb': 'totalmb',
        'systemgb': 'systemmb'
    }, inplace=True)

    # Same GB -> MB conversion applied to the dict view of the engine list.
    enginelist = []
    for engine in engine_list:
        engine_list_dict = collections.OrderedDict(
            ip_address=engine['ip_address'],
            totalmb=int(engine['totalgb']) * 1024,
            systemmb=int(engine['systemgb']) * 1024)
        enginelist.append(engine_list_dict)
    print_debug("engine_list:\n{}".format(engine_list))
    print_debug("enginelist:\n{}".format(enginelist))
    engine_list = enginelist

    if os.path.exists(self.enginecpulistfile):
        self.df_enginecpulist = pd.read_csv(self.enginecpulistfile)
        # NOTE(review): this rewrites 'cpu' only when the frame is EMPTY,
        # which makes the assignment a no-op — the 100-minus conversion
        # never actually fires.  It looks like the intended condition was
        # ``if not self.df_enginecpulist.empty`` (converting idle% <-> used%);
        # confirm against the producer of this CSV before changing.
        if self.df_enginecpulist.empty:
            self.df_enginecpulist['cpu'] = (100 -
                                            self.df_enginecpulist['cpu'])
    # NOTE(review): when the CPU file does not exist, self.df_enginecpulist
    # is still referenced below — presumably initialised elsewhere (e.g. in
    # __init__); verify, otherwise this path raises AttributeError.

    self.df_joblist = pd.read_csv(self.joblistfile)
    self.df_jobexeclist = pd.read_csv(self.jobexeclistfile)
    # One row per job definition (the job list repeats a job once per engine
    # hosting it; ip_address is deliberately excluded from the subset).
    self.df_joblistunq = self.df_joblist.drop_duplicates(subset=[
        'jobid', 'jobname', 'jobmaxmemory', 'reservememory',
        'environmentid', 'environmentname'
    ], keep='first')

    # Memory requirement of the requested job (first matching row wins).
    job_requirement = self.df_joblistunq.query(
        "environmentname == @self.envname and jobid == @self.jobid")
    jobmaxmemory = job_requirement['jobmaxmemory'].values[0]
    reservememory = job_requirement['reservememory'].values[0]

    bannertext = banner()
    print(" ")
    print((colored(bannertext.banner_sl_box(text="Requirements:"),
                   'yellow')))
    print(' Jobid = {}'.format(self.jobid))
    print(' Env = {}'.format(self.envname))
    print(' MaxMB = {} MB'.format(jobmaxmemory))
    print(' ReserveMB = {} MB'.format(reservememory))
    print(' Total = {} MB'.format(jobmaxmemory + reservememory))

    # Verbose report: full engine pool and raw per-engine CPU figures.
    if self.config.verbose or self.config.debug:
        print((colored(
            bannertext.banner_sl_box(text="Available Engine Pool:"),
            'yellow')))
        print('{0:>1}{1:<35}{2:>20}{3:>20}'.format("", "Engine Name",
                                                   "Total Memory(MB)",
                                                   "System Memory(MB)"))
        for ind in self.df_enginelist.index:
            print('{0:>1}{1:<35}{2:>20}{3:>20}'.format(
                " ", self.df_enginelist['ip_address'][ind],
                self.df_enginelist['totalmb'][ind],
                self.df_enginelist['systemmb'][ind]))
        print((colored(bannertext.banner_sl_box(text="CPU Usage:"),
                       'yellow')))
        print('{0:>1}{1:<35}{2:>20}'.format("", "Engine Name",
                                            "Used CPU(%)"))
        for ind in enginecpu_list:
            print('{0:>1}{1:<35}{2:>20}'.format(" ", ind['ip_address'],
                                                ind['cpu']))

    # Memory committed per engine = sum of jobmaxmemory over RUNNING jobs.
    engineusage = self.df_jobexeclist.query(
        "jobstatus == 'RUNNING'").groupby(
            'ip_address')['jobmaxmemory'].sum().reset_index(
                name="totalusedmemory")
    if engineusage.empty:
        # No running jobs: every engine is treated as using 0 MB.
        # NOTE(review): the next assignment is dead — immediately overwritten.
        engineusage = pd.DataFrame()
        engineusage = self.df_enginelist[['ip_address']].copy()
        engineusage['totalusedmemory'] = 0

    print((colored(bannertext.banner_sl_box(text="Memory Usage:"),
                   'yellow')))
    print('{0:>1}{1:<35}{2:>20}'.format("", "Engine Name",
                                        "Used Memory(MB)"))
    for ind in engineusage.index:
        print('{0:>1}{1:<35}{2:>20}'.format(
            " ", engineusage['ip_address'][ind],
            engineusage['totalusedmemory'][ind]))
    # for ind in engineusage_od:
    #     print ('{0:>1}{1:<35}{2:>20}'.format(" ",ind['ip_address'],ind['totalusedmemory'] ))

    if self.config.verbose or self.config.debug:
        print((colored(
            bannertext.banner_sl_box(text="Engine Current Usage:"),
            'yellow')))
        print('{0:>1}{1:<35}{2:>20}{3:>20}'.format("", "Engine Name",
                                                   "Used Memory(MB)",
                                                   "Used CPU(%)"))
    # Attach per-engine CPU usage (0 when no CPU data is available).
    if self.df_enginecpulist.empty:
        engineusage['cpu'] = 0
    else:
        engineusage = pd.merge(engineusage,
                               self.df_enginecpulist,
                               on="ip_address",
                               how="left").fillna(0)
    if self.config.verbose or self.config.debug:
        for ind in engineusage.index:
            print('{0:>1}{1:<35}{2:>20}{3:>20}'.format(
                " ", engineusage['ip_address'][ind],
                engineusage['totalusedmemory'][ind],
                engineusage['cpu'][ind]))

    if self.config.verbose or self.config.debug:
        print((colored(
            bannertext.banner_sl_box(
                text="Shortlisted Engines for running Job:"),
            'yellow')))
        print('{0:>1}{1:<35}{2:>20}{3:>20}'.format("", "Engine Name",
                                                   "Job ID", "Env Name"))
    # Candidate engines = engines that actually host this job/environment.
    engine_pool_for_job = self.df_joblist.query(
        "environmentname == @self.envname and jobid == @self.jobid")
    if self.config.verbose or self.config.debug:
        for ind in engine_pool_for_job.index:
            print('{0:>1}{1:<35}{2:>20}{3:>20}'.format(
                " ", engine_pool_for_job['ip_address'][ind],
                engine_pool_for_job['jobid'][ind],
                engine_pool_for_job['environmentname'][ind]))
    # print((colored(bannertext.banner_sl_box(text="Result:"),'yellow')))

    # Headroom per candidate once this job's own requirement is reserved.
    jpd1 = pd.merge(engine_pool_for_job,
                    self.df_enginelist,
                    on="ip_address",
                    how="left")
    jpd2 = pd.merge(jpd1, engineusage, on="ip_address",
                    how="left").fillna(0)
    jpd2['availablemb'] = jpd2['totalmb'] - jpd2['systemmb'] - jpd2[
        'totalusedmemory'] - jobmaxmemory - reservememory
    qualified_engines = jpd2.query("availablemb > 0")
    unqualified_engines = jpd2.query("availablemb < 1")

    if qualified_engines.empty:
        # No engine has headroom: report the "red" engines and bail out.
        redcandidate = unqualified_engines.groupby(
            'ip_address')['availablemb'].max().reset_index(
                name="maxavailablememory")
        # Add the job requirement back so the report shows free memory
        # before this job, not the (negative) post-reservation headroom.
        redcandidate['maxavailablememory'] = redcandidate[
            'maxavailablememory'] + jobmaxmemory + reservememory
        if self.df_enginecpulist.empty:
            # Alias: the 'cpu' column set on redcandidate lands on both names.
            redcandidatewithcpu = redcandidate
            redcandidate['cpu'] = 0
        else:
            redcandidatewithcpu = pd.merge(redcandidate,
                                           self.df_enginecpulist,
                                           on="ip_address",
                                           how="left").fillna(0)
        if self.config.verbose or self.config.debug:
            print((colored(bannertext.banner_sl_box(text="Red Engines:"),
                           'yellow')))
            print(colored(redcandidatewithcpu, 'red'))
        print(
            " All engines are busy. Running job# {} of environment {} may cause issues."
            .format(self.jobid, self.envname))
        print(
            " Existing jobs may complete after sometime and create additional capacity to execute new job."
        )
        print(" Please retry later.")
    else:
        # At least one engine qualifies; still show the overloaded ones.
        if not unqualified_engines.empty:
            redcandidate = unqualified_engines.groupby(
                'ip_address')['availablemb'].max().reset_index(
                    name="maxavailablememory")
            redcandidate['maxavailablememory'] = redcandidate[
                'maxavailablememory'] + jobmaxmemory + reservememory
            if self.df_enginecpulist.empty:
                # Alias, same as above: 'cpu' is visible through both names.
                redcandidatewithcpu = redcandidate
                redcandidate['cpu'] = 0
            else:
                redcandidatewithcpu = pd.merge(redcandidate,
                                               self.df_enginecpulist,
                                               on="ip_address",
                                               how="left").fillna(0)
            if self.config.verbose or self.config.debug:
                print(
                    (colored(bannertext.banner_sl_box(text="Red Engines:"),
                             'yellow')))
                print(colored(redcandidatewithcpu, 'red'))

        # "Green" engines; the winner is the one with maximum headroom.
        bestcandidate = qualified_engines.groupby(
            'ip_address')['availablemb'].max().reset_index(
                name="maxavailablememory")
        if self.df_enginecpulist.empty:
            bestcandidatedetails = bestcandidate
            bestcandidatedetails['cpu'] = 0
        else:
            bestcandidatedetails = pd.merge(bestcandidate,
                                            self.df_enginecpulist,
                                            on="ip_address",
                                            how="left").fillna(0)
        if self.config.verbose or self.config.debug:
            print((colored(bannertext.banner_sl_box(text="Green Engines:"),
                           'yellow')))
            print(colored(bestcandidatedetails, 'green'))
        print((colored(bannertext.banner_sl_box(text="Best Candidate:"),
                       'yellow')))
        print(" ")
        # .iloc with the label from idxmax() works because reset_index()
        # above yields a 0..n-1 RangeIndex (label == position).
        win_engine = bestcandidatedetails.iloc[
            bestcandidatedetails['maxavailablememory'].idxmax()]
        engine_name = win_engine['ip_address']
        engine_mem = win_engine['maxavailablememory']
        engine_cpu = win_engine['cpu']
        print(
            colored(
                " Engine : {} , Available Memory : {} MB , Available CPU : {}% "
                .format(engine_name, engine_mem, engine_cpu),
                color='green',
                attrs=['reverse', 'blink', 'bold']))

        # Only submit the job when a run was actually requested.
        if self.run:
            apikey = self.get_auth_key(engine_name)
            # print(apikey)
            job_exec_response = self.exec_job(engine_name, apikey,
                                              self.jobid)
            if job_exec_response is not None:
                if job_exec_response['status'] == 'RUNNING':
                    executionId = job_exec_response['executionId']
                    # print(colored(" Execution of Masking job# {} with execution ID {} on Engine {} is in progress".format(self.jobid,executionId,engine_name),'green'))
                    # (despite the name, this prints blue text on white)
                    print_green_on_white = lambda x: cprint(
                        x, 'blue', 'on_white')
                    print_green_on_white(
                        " Execution of Masking job# {} with execution ID {} on Engine {} is in progress"
                        .format(self.jobid, executionId, engine_name))
                else:
                    # print(colored(" Execution of Masking job# {} on Engine {} failed".format(self.jobid,engine_name),'red'))
                    print_red_on_white = lambda x: cprint(
                        x, 'red', 'on_white')
                    print_red_on_white(
                        " Execution of Masking job# {} on Engine {} failed"
                        .format(self.jobid, engine_name))
            else:
                # exec_job returned nothing at all — treat as failure.
                print_red_on_white = lambda x: cprint(x, 'red', 'on_white')
                print_red_on_white(
                    " Execution of Masking job# {} on Engine {} failed".
                    format(self.jobid, engine_name))
    print(" ")