def fetch_config(self):
    """Main governing loop for this manager.

    Each pass: loads the account/DB configuration, requests user
    creation, pushes the accounts to the interpreter server, enqueues
    one HTTP-check job per group, and then drains report messages for
    one configured interval before repeating.

    :return: never returns (infinite loop)
    :rtype: None
    """
    while True:
        config = Config()
        # NOTE(review): return value discarded — presumably this call
        # populates internal account state before init_db_config();
        # confirm against the Config implementation.
        config.get_account()
        newconfig = config.init_db_config()
        userinfo = config.request_user_creation(newconfig)
        grouplist = newconfig.find_groupnames(
            newconfig.get_account().get_groups())
        self.interpreterServer.createAccounts(userinfo)
        path = newconfig.get_script_path()
        # One identical check job per group.
        worklist = ["python " + path + "check_http.py db.no"]
        for groupname in grouplist:
            newconfig.create_work_queue(newconfig.get_queue_name(),
                                        {groupname: worklist})
        # Consume report messages until the configured interval elapses.
        queue = Queues()
        timestart = time.time()
        while time.time() - timestart <= float(newconfig.get_interval()):
            queue.receive_one_message_from_q("reportq",
                                             time.time() - timestart,
                                             newconfig.get_interval())
def fetch_config(self):
    """Main governing loop for this manager (purser variant).

    Each pass: for every group, fetches the per-user configuration from
    couchdb, enqueues one job dict describing the file/sentence check,
    then blocks on one message from the purser report queue.

    :return: never returns (infinite loop)
    :rtype: None
    """
    config = Config()
    newconfig = config.init_db_config()
    grouplist = newconfig.get_account().get_groups()
    logging.info("Interval: %s", str(newconfig.get_interval()))
    while True:
        for groupname in grouplist:
            userconfig = self.interpreterServer.getUserConfig(groupname,
                                                              "couchdb")
            # NOTE: "Sentance"/"sentance" spellings are part of the
            # runtime schema — do not "fix" them.
            worklist = [{
                "ip": userconfig["ipaddress"],
                "sentance": userconfig["Sentance"],
                "filepath": userconfig["filepath"],
                "file": userconfig["file"],
                "timestamp": time.time()
            }]
            groupdict = {groupname: worklist}
            logging.critical(str(groupname) + " " + str(worklist))
            newconfig.create_work_queue(newconfig.get_queue_name(),
                                        groupdict)
        # Wait for one report before starting the next sweep.
        queue = Queues()
        queue.receive_one_message_from_q("purser_report_q",
                                         str(newconfig.get_interval()))
def run_clerk(self):
    """Deploy one clerk test per user.

    For every group, builds the clerk.pl command line for that tenant,
    puts it on the work queue, then blocks on one message from the
    clerk report queue before the next sweep.

    :return: never returns (infinite loop)
    :rtype: None
    """
    config = Config()
    newconfig = config.init_db_config()
    grouplist = newconfig.get_account().get_groups()
    logging.info("Interval: %s", str(newconfig.get_interval()))
    while True:
        for groupname in grouplist:
            userconfig = self.interpreterServer.getUserConfig(groupname,
                                                              "couchdb")
            tenant_name = userconfig["tenant_name"]
            executable_string = ("/root/uptime_challenge_master/testscript/"
                                 "clerk.pl -n " + tenant_name)
            worklist = [executable_string]
            groupdict = {groupname: worklist}
            logging.critical(str(groupname) + " " + str(worklist))
            newconfig.create_work_queue(newconfig.get_queue_name(),
                                        groupdict)
        # Wait for one report before starting the next sweep.
        queue = Queues()
        queue.receive_one_message_from_q("clerk_reportq",
                                         str(newconfig.get_interval()))
def fetch_config(self):
    """Main governing loop for this manager (webuse variant).

    Seeds per-group workload strength from each user's stored offset
    into the strength/time list, enqueues webuse.pl jobs, then loops
    forever advancing every group's position through the list (wrapping
    at 288, i.e. one day of 5-minute slots — TODO confirm) and
    re-enqueuing jobs scaled to the current strength value.

    :return: never returns (infinite loop)
    :rtype: None
    """
    math = WebUseMath()
    strengthlist = math.create_time_list()
    config = Config()
    newconfig = config.initDbConfig()
    grouplist = newconfig.getAccount().get_groups()
    path = newconfig.get_script_path()
    logging.info("Interval" + str(newconfig.get_interval()))
    positiondict = {}
    # Seed each group's starting position from its stored offset.
    for groupname in grouplist:
        userconfig = self.interpreterServer.getFileAndOffsetFromUser(groupname)
        ipconfig = self.interpreterServer.getIpFromUser(groupname)
        ip = ipconfig["ipaddress"]
        executable_string = path + "webuse.pl -U " + ip + " -r '10:10:10:10'"
        logging.info(str(userconfig))
        index = int(userconfig["offset"])
        logging.info("INDEX " + str(index))
        content = math.decideEntry(strengthlist, index)
        listvalues = math.convertToList(content)
        position = int(listvalues[0])
        strength_number = math.calculateList(listvalues)
        worklist = math.create_number_of_scripts(strength_number,
                                                 executable_string)
        newconfig.createWorkQ(newconfig.get_queue_name(),
                              {groupname: worklist})
        positiondict.update({groupname: position})
    while True:
        for groupname, position in positiondict.iteritems():
            ipconfig = self.interpreterServer.getIpFromUser(groupname)
            ip = ipconfig["ipaddress"]
            executable_string = (path + "webuse.pl -U " + ip +
                                 " -r '10:10:10:10'")
            # NOTE(review): this log line was redacted in the original
            # ("USER: "******"); reconstructed as user + position.
            logging.info("USER: " + str(groupname) +
                         " POSITION: " + str(position))
            strength_value_as_string = math.jumpToNextEntry(strengthlist,
                                                            int(position))
            values_in_value_string = math.convertToList(
                strength_value_as_string)
            strength_number = math.calculateList(values_in_value_string)
            worklist = math.create_number_of_scripts(strength_number,
                                                     executable_string)
            groupdict = {groupname: worklist}
            # Advance (wrap at 288) for the next sweep.
            if position == 288:
                positiondict[groupname] = 0
            else:
                positiondict[groupname] = position + 1
            newconfig.createWorkQ(newconfig.get_queue_name(), groupdict)
        # Block on one webuse report before the next sweep.
        queue = Queues()
        queue.receive_one_message_from_q("webusereportq",
                                         newconfig.get_interval())
def run_httperf(self):
    """Generate workload-profile httperf tests per deployment.

    Requires the file transsine.dat to be present. Seeds per-group
    positions from each user's stored offset into the strength/time
    list, enqueues traffic.sh jobs scaled to the current strength,
    then loops forever advancing positions (wrapping at 288 — TODO
    confirm slot count) and re-enqueuing.

    :return: never returns (infinite loop)
    :rtype: None
    """
    math = WebUseMath()
    strengthlist = math.create_time_list()
    config = Config()
    newconfig = config.init_db_config()
    grouplist = newconfig.get_account().get_groups()
    path = newconfig.get_script_path()
    executable_string = path + "traffic.sh"
    logging.info("Interval: " + str(newconfig.get_interval()))
    positiondict = {}
    # Seed each group's starting position from its stored offset.
    for groupname in grouplist:
        userconfig = self.interpreterServer.getFileAndOffsetFromUser(groupname)
        ipconfig = self.interpreterServer.getIpFromUser(groupname)
        ip = ipconfig["ipaddress"]
        index = int(userconfig["offset"])
        logging.info("INDEX: " + str(index))
        content = math.decide_entry(strengthlist, index)
        listvalues = math.convert_to_list(content)
        position = int(listvalues[0])
        # NOTE(review): this log line was redacted in the original
        # ("USER: "******"); reconstructed as user + position.
        logging.info("USER: " + str(groupname) +
                     " POSITION: " + str(position))
        strength_number = math.calculatelist(listvalues)
        worklist = math.create_httperf_string(ip, strength_number,
                                              executable_string)
        newconfig.create_work_queue(newconfig.get_queue_name(),
                                    {groupname: worklist})
        positiondict.update({groupname: position})
    while True:
        for groupname, position in positiondict.iteritems():
            # NOTE(review): redacted in the original; reconstructed.
            logging.info("USER: " + str(groupname) +
                         " POSITION: " + str(position))
            ipconfig = self.interpreterServer.getIpFromUser(groupname)
            ip = ipconfig["ipaddress"]
            strength_value_as_string = math.jump_to_next_entry(
                strengthlist, int(position))
            values_in_value_string = math.convert_to_list(
                strength_value_as_string)
            strength_number = math.calculatelist(values_in_value_string)
            worklist = math.create_httperf_string(ip, strength_number,
                                                  executable_string)
            groupdict = {groupname: worklist}
            # Advance (wrap at 288) for the next sweep.
            if position == 288:
                positiondict[groupname] = 0
            else:
                positiondict[groupname] = position + 1
            newconfig.create_work_queue(newconfig.get_queue_name(),
                                        groupdict)
        # Block on one httperf report before the next sweep.
        queue = Queues()
        queue.receive_one_message_from_q("httperfreportq",
                                         newconfig.get_interval())
def send_users_to_queue(self, accountlist):
    """OUTDATED - stringify the account list and publish it on the
    user-creation queue.

    :param accountlist: accounts to announce
    :type accountlist: list
    :return: None
    :rtype: None
    """
    Queues().create_queue("createuserq", str(accountlist))
def create_report_queue(self, queuename, content):
    """Create the report queue and publish *content* on it.

    :param queuename: name of the queue to create
    :type queuename: str
    :param content: payload to publish
    :type content: object
    :return: None
    :rtype: None
    """
    report_queue = Queues()
    report_queue.create_queue(queuename, content)
def create_work_queue(self, queuename, joblist):
    """Create a work queue and put one message per job on it.

    Each message is a single-entry dict mapping the group name to one
    job string.

    :param queuename: name of the work queue
    :type queuename: str
    :param joblist: mapping of group name -> list of job strings
    :type joblist: dict
    :return: None
    :rtype: None
    """
    queue = Queues()
    for group, jobs in joblist.iteritems():
        for single_job in jobs:
            queue.create_queue(queuename, {group: single_job})
def run_leeshore(self):
    """Deploy one leeshore test per user at a spread-out cadence.

    The day-of-week given as the first CLI argument is treated as a
    day off; on that day (or when the user has leeshore disabled) the
    manager just sleeps for its per-group slice of the interval.
    Otherwise it enqueues one leeshore_short.pl job for the tenant and
    blocks on the leeshore report queue for the same slice.

    :return: never returns (infinite loop)
    :rtype: None
    """
    # Day off for leeshore, taken from the command line (0 = Monday).
    day = int(sys.argv[1])
    config = Config()
    newconfig = config.init_db_config()
    grouplist = newconfig.get_account().get_groups()
    # Split the interval evenly across groups (Py2 integer division).
    runinterval = int(newconfig.get_interval()) / len(grouplist)
    logging.info("Interval: " + str(newconfig.get_interval()))
    while True:
        for groupname in grouplist:
            userconfig = self.interpreterServer.getUserConfig(groupname,
                                                              "couchdb")
            if (datetime.datetime.today().weekday() == day
                    or userconfig["leeshore_enabled"] == 0):
                logging.critical("Today is a day off for leeshore")
                time.sleep(runinterval)
            else:
                tenant_name = userconfig["tenant_name"]
                executable_string = ("/root/uptime_challenge_master/"
                                     "testscript/leeshore_short.pl -n " +
                                     tenant_name)
                worklist = [executable_string]
                newconfig.create_work_queue(newconfig.get_queue_name(),
                                            {groupname: worklist})
                # Wait for this user's report before moving on.
                queue = Queues()
                queue.receive_one_message_from_q("leeshore_reportq",
                                                 str(runinterval))