def Inititalize():  # NOTE(review): name typo ("Inititalize") kept — callers use it
    """Populate the MyExternalGlobals module global from the current request.

    When the request carries external globals, parse them into an
    ExternalGlobals instance; otherwise just log that none were found.
    """
    global MyExternalGlobals
    if MyRequest.ExternalGlobals is None:
        Log("No external variables found")
    else:
        Log("Parsing external variables")
        MyExternalGlobals = ExternalGlobals()
def DoDisplayStation(listStatNumbers):
    """Dump 'display station' output for each extension to a report file.

    Reroutes SSH output to <SwitchDumpLocation>\\AvayaDispStats.txt, runs
    GetStationInfo for every extension, and always restores normal output
    routing afterwards.

    Args:
        listStatNumbers: iterable of station/extension numbers to query.

    Raises:
        Exception: when no dump location is configured, or output cannot be
            rerouted to the file.
    """
    Log("->DisplayStation.DoDisplayStation")
    dumpLocation = ExternalGlobals.MyExternalGlobals.GetVariable(
        ExternalGlobals.GENERAL, ExternalGlobals.SWITCHDUMPLOCATION)
    # FIX: "is None" / strip-truthiness instead of "== None" comparisons.
    if dumpLocation is None or not dumpLocation.strip():
        raise Exception(
            "There is no location to dump the report to. Please specify the report dump location in the Cairs Switch > Global Variables > General.SwitchDumpLocation"
        )
    fileName = "AvayaDispStats.txt"
    # FIX: build the path once. The original built it twice with inconsistent
    # escaping ("{0}\{1}" vs "{0}\\{1}") that only accidentally produced the
    # same backslash-joined string.
    fullPath = "{0}\\{1}".format(dumpLocation, fileName)
    MyRequest.ReturnDictionary[ScriptReturn.SWITCH_DUMP_LOCATION] = fullPath
    try:
        if SSH.RerouteOutputToFile(fullPath):
            Log("File output rerouted to location:{0}".format(fullPath))
            Log("...")
            for num in listStatNumbers:
                GetStationInfo(num)
        else:
            # Won't normally hit here: reroute failures error on the C# side.
            raise Exception("Couldn't reroute file")
    finally:
        # Always restore routing, even on failure. (The original
        # "except Exception as e: raise e" was a no-op and was removed.)
        SSH.FillBuffer()
        SSH.EndRouteOutputToFile()
    Log("Display Stations Complete!")
def run(self):
    """Start the queue server plus worker threads, then loop processing commands.

    Initializes the scenario, registers the queue server for the maximum case
    count, launches the worker process/monitor daemon threads, and then spins
    on command_process() until self.running is cleared. Any startup failure is
    logged with its traceback and aborts the run.
    """
    Log().info("master pid<{}>".format(os.getpid()))
    try:
        self.ScenarioHandler.scenario_init()
        max_cases_number = self.ScenarioHandler.max_cases_count()
        self.QueueServer.auto_register(max_cases_number)
        self.QueueServer.start()
        self.WorkerHandler = WorkerHandler()
        self.WorkerHandler.init_workers(scenario=self.scenario)
        # daemon=True so these threads don't block interpreter shutdown
        # (modern equivalent of the deprecated setDaemon(True)).
        self.WorkerProcess = WorkerProcess()
        self.WorkerProcess.daemon = True
        self.WorkerProcess.start()
        self.WorkerMonitor = WorkerMonitor()
        self.WorkerMonitor.daemon = True
        self.WorkerMonitor.start()
    except Exception as err:
        Log().error(PRINT_RED + str(err) + PRINT_END)
        # BUG FIX: traceback.print_exc() takes an optional line-limit int,
        # not the exception object; passing err raises a TypeError in Py3.
        traceback.print_exc()
        return
    while self.running:
        self.command_process()
        if not self.WorkerProcess.is_alive() and self.running:
            Log().error("worker process thread exit.")
        if not self.WorkerMonitor.is_alive() and self.running:
            Log().error("worker monitor thread exit.")
def DoChangeSet():
    """Drive an interactive 'Change Station' session over SSH, page by page.

    Verifies parameters, logs in, then walks every page of the station form,
    applying changes via DoPage until the header reports the last page, and
    finally commits with ENTER.

    Returns:
        True when the switch confirms the change completed.

    Raises:
        Exception: when the station page header never appears (locked,
            invalid, or nonexistent station).
    """
    Log("-> DoChangeSet")
    changeDictionary = DoPreChangeVerification()
    Login()
    ext = changeDictionary["EXTENSION"]
    Log("Changing Station: {0}".format(ext))
    SSH.SendCommand("Change Station {0}".format(ext))
    LastPage = False
    while not LastPage:
        pageHeader = SSH.WaitForPattern(STATIONPAGEPATTERN, 1)
        # FIX: guard clause with "is None" instead of "!= None" + deep nesting.
        if pageHeader is None:
            raise Exception(
                "Station {0} is locked, invalid or does not exist.".format(
                    ext))
        pageNumbers = Page.GetPageDetails(pageHeader)
        currentPage = pageNumbers.group(1)
        maxPage = pageNumbers.group(2)
        # String comparison of the "page X of Y" numbers is sufficient here.
        LastPage = (currentPage == maxPage)
        Log("page {0} of {1}".format(currentPage, maxPage))
        SSH.SendCommand(HELP)
        DoPage(changeDictionary)
        if not LastPage:
            SSH.SendCommand(NEXTPAGE)
    SSH.SendCommand(ENTER)
    SSH.WaitForData("Command successfully completed")
    return True
def loadMethod(self, inst, fnname):
    """Register inst.<fnname> as a filter callback when a matching template exists.

    Skips private names; only callables whose name appears in the packet
    capture's template index are appended to self.filters[fnname].
    """
    if fnname.startswith('_'):
        return
    fn = getattr(inst, fnname)
    if not callable(fn):
        return
    Log(3, "Found method %s" % fnname)
    # BUG FIX: dict.has_key() was removed in Python 3 — use the "in" operator.
    if fnname in self.main.pktCapture.slTemplate.byName:
        # setdefault replaces the get-then-assign-then-append dance.
        self.filters.setdefault(fnname, []).append(fn)
        Log(2, 'Added message %s' % fnname)
def handle(self, **kwargs):
    """Prepare the cache directory, wiping stale data on a single-run start."""
    Log().info("@[{}]".format(self.__class__.__name__))
    mkdir(CACHE_DIR)
    # A fresh single-shot "start" gets a clean slate.
    if kwargs["single"] and kwargs["operation"] == "start":
        clean_folder(CACHE_DIR)
        remove(CACHE_FILE)
        Log().info("cleaning data folder")
    super(InitEnv, self).handle(**kwargs)
def handle(self, **kwargs):
    """Ensure a master server is listening on localhost:5000, starting one if needed."""
    Log().info("@[{}]".format(self.__class__.__name__))
    scenario = kwargs["scenario"]
    if is_open("localhost", 5000):
        Log().info("Master Server already started.")
    else:
        # Master server is not running, will do start.
        Log().info("Master Server starting... ")
        master_server = MasterServer()
        master_server.set_scenario(scenario)
        master_server.start()
    super(StartMaster, self).handle(**kwargs)
def loadClass(self, mod, clsname):
    """Instantiate a public class from *mod* when it directly subclasses the
    filter base class, then scan the instance's methods for filter callbacks."""
    if clsname.startswith('_'):
        return
    Log(3, "Found class %s" % clsname)
    cls = getattr(mod, clsname)
    # Only direct subclasses of the filter base are loadable filters.
    if hasattr(cls, '__bases__') and self.__filterClass in cls.__bases__:
        pyfile = os.path.basename(mod.__file__)
        Log(1, 'Loaded filter %s from %s' % (clsname, pyfile))
        inst = cls(self.main)
        for fnname in sorted(dir(inst)):
            self.loadMethod(inst, fnname)
def wapper(self, *args, **kwargs):  # NOTE(review): "wapper" typo kept — it's the bound name
    """Decorator body: run the wrapped func, logging the traceback on failure.

    FIX: the original re-raised every error as a bare ``Exception(err)``,
    discarding the concrete exception type. A bare ``raise`` preserves the
    original exception (still caught by callers' ``except Exception``).
    """
    try:
        return func(self, *args, **kwargs)
    except Exception:
        Log().error(traceback.format_exc())
        raise
def write(self, data):
    """Write str(data) to the wrapped descriptor, flushing when supported.

    Returns whatever the underlying write() returns (typically a length).
    """
    text = str(data)
    Log(4, "WRITE: %r" % text)
    written = self.fd.write(text)
    if hasattr(self.fd, 'flush'):
        self.fd.flush()
    return written
def pullMessage(self, name):
    """Pop the oldest pending message addressed to *name*, archiving it on success.

    Returns a dict with "success", plus the message under "message" when found.
    """
    Log(INFO, 'Received pull request for \"name\"= ' + name)
    found, obj = self.pendingDb.find('To', name)
    if not obj:
        Log(INFO, 'Not Found message')
        return {"success": False}
    # Move the message from the pending store into the archive.
    self.pendingDb.remove(obj)
    self.archiveDb.insert(obj)
    Log(INFO, 'Return message= ' + str(obj))
    return {"success": True, "message": obj}
def __init__(self, channelJson):
    """Build a channel wrapper from a raw channel JSON dictionary.

    Args:
        channelJson: dict with at least "uid", "title", "metadata", "image".
    """
    # uid may arrive as a number; normalize the token to a string.
    self.token = str(channelJson["uid"])
    self.title = channelJson["title"]
    # "metadata" holds the genre entries for this channel — see getChannelByCid.
    self.genres = channelJson["metadata"]
    self.image = channelJson["image"]
    self.logger = Log().getLogger()
def calckBins(self, data, _type, step):
    """Build source and target bin edges (mostly delayed DataArrays) for integration.

    Depending on ``_type``:
      * 'ping'  — source bins are ping indices; targets every ``step``-th ping.
      * 'time'  — source bins are elapsed seconds; targets every ``step`` seconds.
      * 'range' — source bins are the existing range axis; targets every ``step`` units.

    Side effect: sets ``self.ping_time`` to the (delayed) ping_time coordinate
    of the target bins for 'ping'/'time'. Returns ``(sbins, tbins)``; both are
    None for an unknown ``_type`` (an error is logged).
    """
    sbins = None
    tbins = None
    if _type == 'ping':
        sbins = dask.delayed(
            xr.DataArray(np.arange(0, len(data['ping_time']))))
        tbins = dask.delayed(
            xr.DataArray(np.arange(0, len(data['ping_time']), step)))
        # ping_time coordinate subsampled at every `step`-th ping.
        self.ping_time = dask.delayed(
            xr.DataArray(data['ping_time'].isel(ping_time=np.arange(
                0, len(data['ping_time']), step).astype(np.int32)).values))
    elif _type == 'time':
        sbins = dask.delayed(self.calckTimeInSeconds)(data['ping_time'])
        # NOTE: sbins[-1].compute() forces eager evaluation here to size tbins.
        tbins = dask.delayed(
            xr.DataArray(np.arange(0, sbins[-1].compute(), step)))
        self.ping_time = dask.delayed(
            xr.DataArray(
                np.arange(data['ping_time'][0].compute().values,
                          data['ping_time'][-1].compute().values,
                          np.timedelta64(step, 's'))))
    elif _type == 'range':
        # Range axis is used directly (not wrapped in dask.delayed).
        sbins = data['range']
        tbins = dask.delayed(
            xr.DataArray(np.arange(0, data['range'][-1], step)))
    else:
        Log().error('{} integration type not defined'.format(_type))
    return sbins, tbins
def getChannelByCid(cid):
    """Fetch a single channel row by cid with its metadata (genre/mid/score) attached.

    Returns:
        dict of channel columns plus a "metadata" list of genre rows, or None
        when the cid does not exist (or a pymysql Warning is raised).
    """
    logger = Log().getLogger()
    logger.info("\nGetting Channel By CID: {}".format(str(cid)))
    try:
        dbConnection = pymysql.connect(host=settings.hostname,
                                       user=settings.username,
                                       passwd=settings.password,
                                       db=settings.database)
        try:
            with dbConnection.cursor(pymysql.cursors.DictCursor) as cursor:
                # SECURITY FIX: parameterized queries — the original
                # interpolated cid straight into the SQL string (injection risk).
                cursor.execute("SELECT * FROM `channel` WHERE `cid` = %s",
                               (str(cid),))
                channel = cursor.fetchone()
                logger.info("Fetched channel " + str(channel))
                if channel is None:
                    # FIX: unknown cid used to crash with a TypeError below.
                    return None
                cursor.execute(
                    "SELECT M.genre, M.mid, C.score "
                    "FROM channel_metadata C, metadata M "
                    "WHERE C.cid = %s AND C.mid = M.mid", (str(cid),))
                channel["metadata"] = cursor.fetchall()
                logger.info("Returning channels: {}".format(str(channel)))
                return channel
        finally:
            # FIX: the original leaked the connection.
            dbConnection.close()
    except Warning:
        # NOTE(review): only Warning is swallowed (as in the original);
        # real pymysql errors still propagate to the caller.
        return None
def __init__(self, CustomerSeg): """ Constructor for LinearInversion class object. No parameters are given. Necessary information from the estimator class is provided later in the execution by QST_sim. Args: CustomerSeg: Customer Segmentation test object """ # Attribute all object variables to class self.CustomerSeg = CustomerSeg #Initialize all metadata and logging variables self.action_dict = {} self.num_preprocess_actions = 0 self.execution_duration_sec = 0 #Instantiate preprocess log self.PreprocessLog = Log("Not Applicable", "%s_Preprocess-Log" % (CustomerSeg.test_name), "Not Applicable", directory=self.CustomerSeg.Log.directory) self.PreprocessLog.master_log_name = self.CustomerSeg.Log.master_log_name self.PreprocessLog.test_number = self.CustomerSeg.Log.test_number #Set preprocess log filename self.PreprocessLogFilename = self.PreprocessLog.preprocess_log_name + ".csv"
def __init__(self):
    """Create the position-info API client for the configured instrument."""
    self.instrument_id = Instrument_id
    # SECURITY NOTE(review): API credentials are hard-coded in source — move
    # them to environment variables or a secrets store and rotate these keys.
    # ("seceret" spelling kept as-is.)
    api_key = 'a81e9a56-6cc0-4f7a-a146-a9fc590b3d63'
    seceret_key = '5C59BB0410BE0989D3C8861E00CD70BF'
    passphrase = 'Position_info_API'
    # Trailing True flag's meaning is defined by future.FutureAPI — TODO confirm
    self.futureAPI = future.FutureAPI(api_key, seceret_key, passphrase, True)
    self.log = Log("Position_info_API.txt")
def handle(self, **kwargs):
    """Drive the master client: run one "one-click" operation when given,
    otherwise drop into the interactive console."""
    Log().info("@[{}]".format(self.__class__.__name__))
    master_client = MasterClient()
    if kwargs["operation"]:
        master_client.start(console=False)
        operation = kwargs["operation"]
        # Dispatch table replaces the long if/elif chain; unknown
        # operations fall through silently, exactly as before.
        actions = {
            "start": master_client.one_click_start,
            "stop": master_client.one_click_stop,
            "result": master_client.one_click_result,
            "check": master_client.one_click_check,
            "clean": master_client.one_click_clean,
            "exit": master_client.one_click_exit,
        }
        action = actions.get(operation)
        if action is not None:
            output = action()
            Log().info(output)
        master_client.stop()
    else:
        # start console
        master_client.start()
    super(AttachMaster, self).handle(**kwargs)
def loadModule(self, package, modname):
    """Scan a public submodule of *package* for loadable filter classes."""
    mod = getattr(package, modname)
    if modname.startswith('_'):
        return
    Log(3, "Found module %s" % modname)
    # FIX: isinstance instead of exact type() comparison — the idiomatic
    # check, and it also accepts module subclasses (e.g. lazy loaders).
    if isinstance(mod, types.ModuleType):
        for clsname in sorted(dir(mod)):
            self.loadClass(mod, clsname)
def __init__(self):
    """Create the order-taking API client for the configured instrument."""
    self.instrument_id = Instrument_id
    # BEISHU is an externally-defined multiplier constant — TODO confirm semantics
    self.beishu = BEISHU
    # SECURITY NOTE(review): API credentials are hard-coded in source — move
    # them to environment variables or a secrets store and rotate these keys.
    # ("seceret" spelling kept as-is.)
    api_key = '50efe898-4ee3-4f7e-bc5a-05e6b955f441'
    seceret_key = '44CB9460F6FB6C906D647390AB650E55'
    passphrase = 'Take_order_API'
    # Trailing True flag's meaning is defined by future.FutureAPI — TODO confirm
    self.futureAPI = future.FutureAPI(api_key, seceret_key, passphrase, True)
    self.log = Log("Take_order_API.txt")
def unregisterUser(self):
    """Delete the user named in the JSON request body.

    Returns HTTP 200 on success, or 405 with a JSON "reason" when the user
    does not exist or removal fails.
    """
    req = request.json
    name = req['name']
    Log(INFO, 'Received request = ' + str(req))
    isExist, obj = self.isUserExistInDb('name', name)
    if not isExist:
        return HTTPResponse(
            status=405,
            body=json.dumps({'reason': 'Not exist user ' + name}))
    if not self.deleteUserFromDb(obj):
        return HTTPResponse(
            status=405,
            body=json.dumps({'reason': 'Failed to remove user ' + name}))
    Log(INFO, 'Deleted user \"name\"= ' + name)
    return HTTPResponse(status=200, body=json.dumps({'reason': 'Deleted'}))
def pushMessage(self):
    """Queue an incoming message (the JSON request body) into the pending DB."""
    req = request.json
    # TODO(review): name validations were planned here but never implemented.
    Log(INFO, 'Received push request ' + str(req))
    self.pendingDb.insert(req)
    return {"success": True}
def ManagePage(page, p):
    """Process one page of 'list stat' output.

    Args:
        page: the captured page text, or None when no page header was seen.
        p: parser/accumulator passed through to ParsePage.

    Returns:
        True when the listing has ended (command prompt reached), else False
        after requesting the next page.

    Raises:
        Exception: when no page arrived and the buffer shows no prompt either.
    """
    # FIX: "is None" instead of "== None".
    if page is None:
        # A missing page header can mean the listing ended; confirm by
        # checking the current buffer for the command prompt.
        response = SSH.GetCurrentMessage()
        if "Command:" in response:
            ParsePage(response, p, False)
            Log("Finished ListStat!")
            return True
        Log("ERROR: Not end / Not page: {0}".format(response))
        raise Exception("Not getting response")
    ParsePage(page, p)
    SSH.SendCommand(NEXTPAGE)
    return False
def read(self, max=None):
    """Read up to *max* units from the wrapped descriptor (everything when None).

    Flushes first when the descriptor supports it, and logs what was read.
    (Parameter name shadows the builtin but is kept for caller compatibility.)
    """
    if hasattr(self.fd, 'flush'):
        self.fd.flush()
    data = self.fd.read() if max is None else self.fd.read(max)
    Log(4, "READ: %r" % data)
    return data
def writeline(self, data):
    """Write str(data) as one line, appending the HTTP newline when the
    descriptor has no native writeline(); flush when supported."""
    text = str(data)
    Log(4, "WRITELINE: %r" % text)
    if hasattr(self.fd, 'writeline'):
        count = self.fd.writeline(text)
    else:
        count = self.fd.write(text + HTTPConstant.newline)
    if hasattr(self.fd, 'flush'):
        self.fd.flush()
    return count
def registerUser(self):
    """Register the user described by the JSON request body.

    Returns a success dict, or HTTP 405 with a JSON "reason" when the user
    already exists or the insert fails.
    """
    req = request.json
    name = req['name']
    Log(INFO, 'Received request = ' + str(req))
    isExist, obj = self.isUserExistInDb('name', name)
    if isExist:
        Log(INFO, 'Already exists user \"name\"= ' + name)
        return HTTPResponse(
            status=405,
            body=json.dumps({'reason': 'Already exist user ' + name}))
    if not self.addUserToDb(req):
        Log(ERROR, 'Failed to register user \"name\"= ' + name)
        return HTTPResponse(
            status=405,
            body=json.dumps({'reason': 'Failed to register user ' + name}))
    Log(INFO, 'Added new user \"name\"= ' + name)
    return {"success": True, "Added user": name}
def DoPage(changeDict):
    """Capture the currently displayed station page and extract field/value pairs.

    Args:
        changeDict: the requested field changes. NOTE(review): currently
            unused in this body — presumably consumed by a later step; kept
            for interface compatibility.

    Raises:
        Exception: when no page data arrives over SSH.
    """
    SSH.ProgramBreak()
    # "[0;7m" is the ANSI inverse-video sequence the switch wraps values in.
    currentPage = SSH.WaitForData("[0;7m")
    # FIX: "is None" instead of "!= None"; removed the unused positionSet
    # local and dead commented-out code.
    if currentPage is None:
        raise Exception("CurrentLine blank!")
    match = re.findall(FIELDVALUEPATTERN, currentPage)
    Log("matches:{0}".format(match))
def __init__(self):
    """Load external globals from the current request and log a summary count."""
    if MyRequest.ExternalGlobals is None:
        externalGlobalCount = 0
    else:
        externalGlobalCount = len(MyRequest.ExternalGlobals)
        self.LoadGlobalsDictionary()
    Log(
        "***External Global Variables: {0} (Switch Dump Location - {1})".format(
            externalGlobalCount,
            self.GetVariable(GENERAL, SWITCHDUMPLOCATION)),
        False)
def __init__(self):
    """Create the market-info API client and seed the local dbm checkpoint."""
    self.instrument_id = Instrument_id
    # SECURITY NOTE(review): API credentials are hard-coded in source — move
    # them to environment variables or a secrets store and rotate these keys.
    # ("seceret" spelling kept as-is.)
    api_key = 'cc94168a-060d-409c-81b5-bbb3e329bf15'
    seceret_key = '5F21098F1D113AA02A00FFDEF31761A2'
    passphrase = 'Market_info_API'
    # Trailing True flag's meaning is defined by future.FutureAPI — TODO confirm
    self.futureAPI = future.FutureAPI(api_key, seceret_key, passphrase, True)
    self.log = Log("Market_info_API.txt")
    # Seed the per-instrument dbm store with an initial "latest" timestamp
    # the first time it is created ('c' = open, creating if missing).
    db = dbm.open(self.instrument_id, 'c')
    if b'latest' not in db.keys():
        db['latest'] = '2019-02-07T07:32:29.000Z'
    db.close()
def DoPreChangeVerification():
    """Validate and deserialize the request parameter dictionary.

    Returns:
        dict: deserialized parameters, guaranteed to contain "EXTENSION".

    Raises:
        Exception: when parameters are missing, decode to null, or lack an
            EXTENSION entry.
    """
    Log("Verifying required data")
    # FIX: "is None" instead of "== None" comparisons.
    if MyRequest.ParamDictionary is None:
        raise Exception("No parameters found")
    changeDict = json.loads(MyRequest.ParamDictionary)
    if changeDict is None:
        # json.loads only yields None for a literal JSON "null" payload.
        raise Exception(
            "Parameters dictionary cannot be deserialized. Perhaps it's not in the correct Dictionary<string, string> format"
        )
    if "EXTENSION" not in changeDict:
        raise Exception("The Extension was not found in the parameters list")
    return changeDict
def readall(self, length=None):
    """Read exactly *length* units (short only at EOF); everything when None.

    Flushes first when the descriptor supports it, and logs what was read.
    """
    if hasattr(self.fd, 'flush'):
        self.fd.flush()
    if length is None:
        data = self.fd.read()
    else:
        # Loop until the requested amount arrives or the stream dries up.
        pieces = []
        remaining = length
        while remaining:
            chunk = self.fd.read(remaining)
            if not chunk:
                break
            pieces.append(chunk)
            remaining -= len(chunk)
        data = ''.join(pieces)
    Log(4, "READALL: %r" % data)
    return data