def __init__(self, name='broker'):
    """ initialize broker class """
    self.name = name
    self.log = logger.getLogger(self.name)
    self.log.debug('Initializing broker. Pandas version={0}'.format(pandas.__version__))

    self.contracts = {}  # a dict to keep track of subscribed contracts
    self.tws = ibConnection()  # tws interface
    self.nextValidOrderId = None

    self.dataModel = Subscriptions(self.tws)  # data container

    self.tws.registerAll(self.defaultHandler)
    #self.tws.register(self.debugHandler,message.TickPrice)
    self.tws.register(self.nextValidIdHandler, 'NextValidId')
    self.log.debug('Connecting to tws')
    self.tws.connect()

    self.tws.reqAccountUpdates(True, '')
def testDifferentLoggers(self):
    self.produce_logs()  # produce 5 traces with 'root'
    self.assertEqual(OUTPUT_LOGGER.getStdout().count('root'), 5)

    self.log = logger.getLogger('other')
    self.produce_logs()  # produce 5 traces with 'other'
    self.assertEqual(OUTPUT_LOGGER.getStdout().count('other'), 5)
def __init__(self):
    self.rd = redis.Redis(host=settings.REDIS_HOST, port=settings.REDIS_PORT)
    self.schedule_interval = settings.SCHEDULE_INTERVAL
    self.intervals_per_cycle = settings.INTERVALS_PER_CYCLE
    self.mon = None
    self.logger = logger.getLogger("Scheduler")
def __init__(self):
    self.log = l.getLogger('model')
    self.conf = EC.EngineConfig.Instance()

    # A Batch is a collection of vertex lists for batched rendering.
    self.batch = pyglet.graphics.Batch()

    # Mapping from sector to a list of positions inside that sector.
    self.sectors = {}

    # Simple function queue implementation. The queue is populated with
    # _show_block() and _hide_block() calls
    self.queue = deque()

    self._materialFactory = Materials.MaterialFactory.Instance()

    # all shown blocks.
    self.visibleWorld = {}

    # This defines all the blocks that are currently in the world.
    try:
        (self.world, self.player) = Savegame.Savegame.load()
        # make blocks visible after loading
        for position in self.world.getBlockPositions():
            # sectorize blocks
            self.sectors.setdefault(
                Transform.sectorize(position, self.conf.getConfValue('sectorSize')),
                []).append(position)
        self.show_sector(0)
    except Exception as e:
        self.log.debug("Couldn't load a savegame. Creating new world ...")
        self.world = World.World()
        self.player = Player.Player()
        self.visibleWorld = {}
        self._initialize()
def __init__(self, queue, callback):
    self.queue = queue
    self.callback = callback
    self.running = True
    self.thread = threading.Thread(target=self.__read)
    self.thread.start()
    self.log = logger.getLogger()
def log_version(module, name, version, version_detail=None):
    _log = getLogger(module)
    if version_detail:
        _log.info('%-20s version: %s (%s)', name, version, version_detail)
    else:
        _log.info('%-20s version: %s', name, version)
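# Hedged usage sketch (not part of the original source): how the log_version()
# helper above might be invoked. It assumes this block lives in the same file
# as log_version(); the module name 'example.module' is illustrative only.
if __name__ == '__main__':
    import sys
    # logs something like: "python               version: 3.10.12 (...)"
    log_version('example.module', 'python', sys.version.split()[0],
                version_detail=sys.version)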
def __init__(self, auth_ip, auth_port, auth_url_path, admin_username,
             admin_password, admin_tenant_name):
    self._authn_url = auth_url_path
    self._authn_user = admin_username
    self._authn_password = admin_password
    self._authn_tenant_name = admin_tenant_name
    self.log = logger.getLogger(logger_name='KeystoneAuth')
    self._keystone_con = ContrailApiConnection(ip=auth_ip, port=auth_port)
def __init__(self, ip="127.0.0.1", port="8082", token=None):
    self.log = logger.getLogger(logger_name=self.__class__.__name__)
    self._ip = ip
    self._port = port
    token_header = {'X-AUTH-TOKEN': token} if token else {}
    self._api_con = ContrailApiConnection(ip=ip, port=port, headers=token_header)
def __init__(self, config):
    # for storing the configuration
    self.config = config

    # instantiate logger
    self.log = l.getLogger(self.__class__.__name__,
                           self.config['LOG_LEVEL'],
                           self.config['app_start_date'],
                           self.config['LOG_PATH'])

    # get the path to database (sqlite) file
    self.db_name = self.config['MAIN_ROOT'] + "/db/nfpa.db"

    self.log.debug("SQLiteDatabaseAdapter class instantiated")

    self.enum_tables = ['cpu_makes', 'cpu_models', 'nic_makes', 'nic_models',
                        'traffic_names', 'traffic_packet_sizes', 'virtualization',
                        'vnf_drivers', 'vnf_functions', 'vnf_names']
    self.abstract_tables = ['cpu', 'nic', 'traffic', 'vnf', 'measurements']

    self.tables = copy.deepcopy(self.enum_tables)
    self.tables += self.abstract_tables

    self.connect()
def __init__(self, tester):
    self.tester = tester
    self.NAME = "ixia"
    self.logger = getLogger(self.NAME)
    super(IxiaPacketGenerator, self).__init__(self.get_ip_address(), self.NAME,
                                              self.get_password())
    super(IxiaPacketGenerator, self).init_log(self.logger)

    self.tcl_cmds = []
    self.chasId = None
    self.conRelation = {}

    ixiaRef = self.tester.get_external_traffic_generator()
    if ixiaRef is None or ixiaRef not in ixiacfg.ixiaPorts:
        return

    self.ixiaVersion = ixiacfg.ixiaPorts[ixiaRef]["Version"]
    self.ports = ixiacfg.ixiaPorts[ixiaRef]["Ports"]

    self.logger.info(self.ixiaVersion)
    self.logger.info(self.ports)

    self.tclServerIP = ixiacfg.ixiaPorts[ixiaRef]["IP"]

    # prepare tcl shell and ixia library
    self.send_expect("tclsh", "% ")
    self.send_expect("source ./IxiaWish.tcl", "% ")
    self.send_expect("set ::env(IXIA_VERSION) %s" % self.ixiaVersion, "% ")
    out = self.send_expect("package req IxTclHal", "% ")
    self.logger.debug("package req IxTclHal return:" + out)
    if self.ixiaVersion in out:
        if not self.tcl_server_login():
            self.close()
            self.session = None
def __init__(self):
    self.output_queue = Queue(100)
    self.input_queue = Queue(100)
    self.sock = None
    self.running = True
    self.reader = None
    self.log = getLogger()
def main():
    """ prepare logging object """
    # logging.basicConfig(level=logging.DEBUG,
    #                     format='%(asctime)s %(levelname)s\t%(name)s\t: %(message)s',
    #                     datefmt='%a, %d %b %Y %H:%M:%S',
    #                     filename='debug.log')
    log = l.getLogger('main')
    log.debug(' #################### Starting ####################')

    """ Create config object and parse command line options """
    conf = EC.EngineConfig.Instance()
    conf.setPath(os.path.abspath(os.path.join(os.path.dirname(__file__))))
    conf.loadConfig()
    conf.setConfValue('baseDir', os.path.dirname(__file__))
    log.debug((docopt(__doc__, version='0.1')))

    log.debug('Creating core object')
    gfx = Engine.Core()

    # Hide the mouse cursor and prevent the mouse from leaving the window.
    log.debug('Setting up core object')
    gfx.set_exclusive_mouse(True)
    gfx.setup()
def scan(interface='wlan0'):
    zlog = logger.getLogger()
    cmd = ["iwlist", interface, "scan"]
    proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    scantext = proc.stdout.read().decode('utf-8')
    return scantext
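# Hedged usage sketch (not part of the original source): calling the scan()
# helper above. It assumes a Linux host with the wireless-tools `iwlist`
# binary, a wlan0 interface, and that this block lives in the same file as
# scan(); the raw output would normally be fed to the parse() helper used by
# the scanner class elsewhere in this collection.
if __name__ == '__main__':
    text = scan('wlan0')
    if text:
        print(text.splitlines()[0])  # first line of the raw iwlist output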
def __init__(self, name):
    self.name = name
    self.log = logger.getLogger(self.name)
    self.log.debug('class created.')

    self.dataDir = os.getenv("USERPROFILE") + '\\twpData\\symbols\\' + self.name
    self.log.debug('Data dir:' + self.dataDir)
    self.ohlc = None  # historic OHLC data
def onRead(self, ioLoop):
    clt, cltInfo = self.sock.accept()
    if self.connNum >= self.maxConnNum:
        clt.close()
        return
    if not self.handshake(clt):
        logger.getLogger('Server').info(
            'client %s:%s connected but is not using the websocket protocol.', *cltInfo)
        return
    self.connNum += 1
    pxy = proxy.Proxy(clt)
    ioLoop.addEvent(pxy, ioLoop.E_READ)
    logger.getLogger('Server').info(
        'websocket client %s:%s is connected. server is gone.', *cltInfo)
def __init__(self):
    self.log = l.getLogger('MaterialFactory')
    # get config object
    conf = EC.EngineConfig.Instance()
    self._materialPath = os.path.join(conf.getConfValue('baseDir'),
                                      'ressources/materials/')
    self._materials = {}
    self.loadMaterials()
def send_reported_state_callback(self, status_code, user_context):
    log = logger.getLogger()
    log.info("Confirmation[{0}] for reported state received with:".format(
        user_context))
    log.info("    status_code: {0}".format(status_code))
    self.sendReportedStateCallbackCount += 1
    log.info("    Total calls confirmed: {0}".format(
        self.sendReportedStateCallbackCount))
def __init__(self, ip="127.0.0.1", port="8082", token=None):
    self.log = logger.getLogger(logger_name=self.__class__.__name__)
    self._ip = ip
    self._port = port
    token_header = {'X-AUTH-TOKEN': token} if token else {}
    self._api_con = ContrailApiConnection(ip=ip, port=port, headers=token_header)
def __init__(self, parent=None):
    super(GraphWidget, self).__init__(parent=parent)
    self.editFlag = False  # not currently in edit mode
    self.controllerKey = ControllerManager().addController()
    self.sceneWidth = 10000
    self.sceneHeight = 10000
    self.bindingFile = None

    # create the graphics scene, passing the controller key
    self.scene = DiagramScene(self.controllerKey)
    # self.scene.setSceneRect(QRectF(-self.sceneWidth / 2.0, -self.sceneHeight / 2.0,
    #                                self.sceneWidth, self.sceneHeight))

    # connect the itemSelected signal to its slot
    self.scene.itemSelected.connect(self.itemSelected)
    self.scene.resetModeSignal.connect(self.modeReseted)
    self.scene.editSignal.connect(self.sceneEdited)

    # create the graphics view, passing the scene as its argument
    self.view = DiagramView(self.scene)
    # self.view.setBackgroundBrush(QColor(230, 200, 167))
    self.view.setBackgroundBrush(QColor(41, 41, 41))
    self.view.setMouseTracking(True)  # enable mouse tracking in the view

    # create a horizontal layout manager
    layout = QHBoxLayout()
    # add the view widget to the layout manager
    layout.addWidget(self.view)
    # set the layout on the parent widget that needs it
    self.setLayout(layout)

    # blank widget: quick toolbar for creating new nodes
    self.blankWidget = QuickDockWidget()
    self.blankWidget.setParent(self)
    self.blankWidget.resize(900, 80)
    self.blankWidget.show()
    self.blankWidget.raise_()
    self.blankWidget.hide()  # hide the node quick toolbar by default

    self.findResult = set()
    self.findResultItems = []
    self.targetNode = None
    self.prevNodeList = []
    self.nextNodeList = []
    self.prevEdgeList = []
    self.nextEdgeList = []

    controller = ControllerManager().getController(self.controllerKey)
    controller.setScene(self.scene)

    self.nameIndex = {}
    self.valueIndex = {}
    self.commentIndex = {}
    self.allNodes = set()
    self.logger = logger.getLogger('GraphWidget')
def __init__(self, ip=None, port=None, headers=None):
    self.log = logger.getLogger(logger_name=self.__class__.__name__)
    self.ip = ip
    self.port = port
    if ip is None or port is None:
        return
    self.url = "http://%s:%s" % (ip, port)
    self._hdr = headers or {}
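# Hedged usage sketch (not part of the original source): the constructor above
# appears to belong to the small HTTP wrapper used by the Contrail snippets in
# this collection; the class name ContrailApiConnection is an assumption based
# on those snippets, and only the attributes set in __init__ are relied upon.
#
#   con = ContrailApiConnection(ip='127.0.0.1', port='8082')
#   # con.url  == 'http://127.0.0.1:8082'
#   # con._hdr == {}          (no headers passed)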
def getInfoWorker(self, url):
    zlog = logger.getLogger()
    result = urllib2.urlopen(self.info_climat.url).read()
    self.result_info_climat = json.loads(result)
    #time.sleep(1)
    #self.result_info_climat = test.METEO_TEST_RESULT
    zlog.logger.info("Got result")
    self.parseInfo()
    self.data_available = True
def __init__(self, domain):
    self.rd = redis.Redis(host=settings.REDIS_HOST, port=settings.REDIS_PORT)
    self.domain = domain
    self.flowaggrs = {}
    self.prios = self.get_prios()
    for prio in self.prios:
        self.flowaggrs[prio] = FlowAggr(domain, prio)
    self.logger = logger.getLogger("PQ#%s" % (self.domain.name))
def __init__(self):
    self._log = logger.getLogger("TK")
    dataDir = os.path.expanduser("~") + "/twpData"
    if not os.path.exists(dataDir):
        os.mkdir(dataDir)
    self._timeFormat = "%Y%m%d %H:%M:%S"
    self.dataFile = os.path.normpath(os.path.join(dataDir, "requests.txt"))
    self._log.debug("Data file: {0}".format(self.dataFile))
def __init__(self, line):
    self.line = line
    self.fn_name = None
    self.fn_params = []
    self.log = logger.getLogger()
    if type(line) == list and len(line) > 0:
        self.fn_name = line[0].value
        for i in line[1:]:
            self.fn_params.append(i.value)
    self.log.debug('Function: %s Params:%s', self.fn_name, self.fn_params)
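# Hedged usage sketch (not part of the original source): the constructor above
# expects a list of token-like objects exposing a `.value` attribute. The
# SimpleToken namedtuple and the class name Function below are illustrative
# assumptions; only the name/params split shown above is taken from the code.
#
#   from collections import namedtuple
#   SimpleToken = namedtuple('SimpleToken', ['value'])
#   fn = Function([SimpleToken('max'), SimpleToken('a'), SimpleToken('b')])
#   # fn.fn_name == 'max', fn.fn_params == ['a', 'b']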
def __init__(self):
    self.logger = getLogger(self.__class__.__name__)
    dbparams = dict(host=getConfig()['task2']['db']['host'],
                    db=getConfig()['task2']['db']['name'],
                    user=getConfig()['task2']['db']['user'],
                    passwd=getConfig()['task2']['db']['password'],
                    charset=getConfig()['task2']['db']['charset'],
                    cursorclass=pymysql.cursors.DictCursor,
                    use_unicode=getConfig()['task2']['db']['use_unicode'])
    self.__dbpool = adbapi.ConnectionPool('pymysql', **dbparams)
def __init__(self):
    self._log = logger.getLogger('TK')
    dataDir = os.path.expanduser('~') + '/twpData'
    if not os.path.exists(dataDir):
        os.mkdir(dataDir)
    self._timeFormat = "%Y%m%d %H:%M:%S"
    self.dataFile = os.path.normpath(os.path.join(dataDir, 'requests.txt'))
    self._log.debug('Data file: {0}'.format(self.dataFile))
def __init__(self, worker_index, _host, _port_base):
    threading.Thread.__init__(self)
    self.host = _host
    self.port_base = _port_base
    self.buffsize = 1024
    self.index = worker_index
    self.isCheckOrStop = 0  # 0: normal; 1: check, stop; 2: restart; -1: exit
    self.go_on = True
    self.logger = logger.getLogger("PS")
def __init__(self, req, link, data, **config):
    super(NMaaS_RESTAPI, self).__init__(req, link, data, **config)
    self.nmaas_network_controller_app = data[nmaas_network_controller_instance_name]
    path = "%s/html/" % PATH
    print(path)
    self.static_app = DirectoryApp(path)
    self.nmaas = self.nmaas_network_controller_app
    self.log = l.getLogger(self.__class__.__name__, self.nmaas.debug_level)
def destroy_session(self, session=None):
    """
    Destroy additional session.
    """
    for save_session in self.sessions:
        if save_session == session:
            save_session.close()
            logger = getLogger(save_session.name)
            logger.logger_exit()
            self.sessions.remove(save_session)
def create_session(self, name=""):
    """
    Create a new session for additional usage.
    This session will not enable logging.
    """
    logger = getLogger(name)
    session = SSHConnection(self.get_ip_address(), name,
                            self.get_username(), self.get_password())
    session.init_log(logger)
    self.sessions.append(session)
    return session
def main():
    logger = getLogger('controller')
    logger.info('Starting controller...')
    config = Config(logger, '/etc/autoshut/controller.ini')
    mserver = MetricServer(logger, 'http://localhost/', config)
    logger.info('Controller started.')
    while True:
        logger.info('Event loop started.')
        apply_rules(logger, config.reload().rules, mserver)
        time.sleep(config.interval_in_seconds)
def __init__(self):
    self._log = logger.getLogger('TK')
    dataDir = os.path.expanduser('~') + '/twpData'
    if not os.path.exists(dataDir):
        os.mkdir(dataDir)
    self._timeFormat = "%Y%m%d %H:%M:%S"
    self.dataFile = os.path.normpath(os.path.join(dataDir, 'requests.txt'))
    self._log.debug('Data file: {0}'.format(self.dataFile))
def __init__(self):
    self._tokens = {}
    self._literals = set()
    self._ignored = set()
    self._error_tokens = {}
    self._warning_tokens = {}
    self.log = logger.getLogger('lexer-factory-%d' % logger.sysid(self))
    self.log.info('Initialized lexer factory')
def __init__(self, scenario_name, host_port, config_file, nfpa_class):
    '''
    This class initializes a bottle python webserver on the given host_port,
    which is passed as host:port!
    scenario_name String - the name for identifying the scenario
    host_port     String - looks like localhost:8000
    nfpa_class    NFPA   - a pointer to the main class to be able to access
                           its startPktgenMeasurements function
    '''
    host_port_string_input = host_port  # used only for printing out below
    host_port = host_port.split(":")
    self.host = host_port[0]
    self.port = host_port[1]

    # read config
    tmp_cfg = rwcf.readConfigFile(config_file)
    # check whether it was successful
    if tmp_cfg[0] == True:
        self.config = tmp_cfg[1]
    else:
        print(tmp_cfg[1])
        exit(-1)
    self.config_comment = rwcf.getConfigComments()

    # instantiate logger
    self.log = l.getLogger(self.__class__.__name__,
                           self.config['LOG_LEVEL'],
                           self.config['app_start_date'],
                           self.config['LOG_PATH'])

    self.log.info("### Measurement scenario '" + scenario_name + "' has been "
                  "initiated with Web-GUI ###")
    self.log.info("NFPA Web interface can be reached under: %s/nfpa"
                  % host_port_string_input)
    # print("ETL: %s" % self.config['ETL'])

    # append scenario name to self.config dictionary for later usage
    self.config['scenario_name'] = scenario_name
    self.nfpa_class = nfpa_class
    # print("in config: %s" % self.config['scenario_name'])

    self._app = Bottle()
    self._route()
    # self.note_pic = self._serve_pictures('note.png')
    self.start()
def __init__(self, parser, database):
    if not isinstance(database, Database):
        raise ValueError('The database must be a subclass of Database')
    if not isinstance(parser, Parser):
        raise ValueError('The parser must be a subclass of Parser')
    self.__logger = getLogger(Logger.ANALYSER)
    self.__parser = parser
    self.__database = database
def __init__(self):
    self.log = l.getLogger(self.__class__.__name__, "DEBUG")

    # fire up underlying network infrastructure
    topo = NMaaSTopo()

    # create a remote controller instance
    c = RemoteController('c0', '127.0.0.1', 6633)

    # Change the args of GenericTree() to your desired values. You could even
    # get them from command line.
    net = Mininet(topo=topo, host=CPULimitedHost, link=TCLink, switch=OVSSwitch,
                  controller=c)
    net.start()

    # We are 'playing' with non-STP topologies, i.e., there are rings in the
    # topology, so an ARP broadcast storm would arise. If we enable ARP, then it
    # blocks the links, i.e., by blocking some ports, multiple paths will
    # disappear. Thus, we set here the ARP tables manually for all hosts;
    # TODO: make it more automatic later in the controller
    number_of_hosts = len(net.hosts)
    for i, h in enumerate(net.hosts):
        # print net.hosts[0].params['ip']
        # print i, h
        if i < (number_of_hosts - 1):
            cmd = "ping -c1 {} &".format(net.hosts[i + 1].params['ip'])
            # print cmd
            h.cmd(cmd)
        else:
            cmd = "ping -c1 {} &".format(net.hosts[0].params['ip'])
            # print cmd
            h.cmd(cmd)
        # for i in range(1, number_of_hosts):
        #     if ("%02d" % i) == h.mac.split(':')[5]:
        #         continue
        #     h.cmd("arp -s 10.0.0.{} 00:00:00:00:00:")

    self.log.info("Add default gw to hosts")
    # for h in net.hosts:
    #     cmd = "ip route add 0.0.0.0/0 dev {}".format(h.defaultIntf())
    #     self.log.debug(cmd)
    #     h.cmd(cmd)

    # for s in net.switches:
    #     cmd = "ovs-vsctl set bridge {} stp-enable=true".format(s)
    #     print("Enabling STP on {}".format(s))
    #     s.cmd(cmd)
    # net.switches[1].cmd(cmd)

    CLI(net)
    net.stop()
def __init__(self):
    """Initialize the plugin

    This method should always be called by subclasses as it is required
    to set up logging etc
    """
    if not hasattr(self, "name"):
        self.name = self.__class__.__name__
    self.log = logger.getLogger(self.name)
    self.log.setLevel(self.logging_level)
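# Hedged usage sketch (not part of the original source): a minimal subclass of
# the plugin base class whose __init__ is shown above. The base-class name
# `Plugin` is an assumption; the snippet only shows that subclasses may set
# `name` and must provide a `logging_level` before calling the base __init__.
#
#   import logging
#
#   class GreeterPlugin(Plugin):
#       name = "greeter"
#       logging_level = logging.INFO
#
#       def __init__(self):
#           super(GreeterPlugin, self).__init__()  # sets up self.log
#           self.log.info("greeter plugin ready")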
def __init__(self, lexer):
    self.lexer_object = lexer

    # Create logger for parser
    self.log = logger.getLogger('parser-%d' % logger.sysid(self))

    # Create ply parser
    log_wrapper = logger.PlyLoggerWrapping(self.log)
    self.parser_object = yacc.yacc(module=self,
                                   debuglog=log_wrapper,
                                   errorlog=log_wrapper,
                                   debug=1,
                                   outputdir='/tmp',
                                   write_tables=False)
    self.log.info('Created parser; class ID=%d' % logger.sysid(self.__class__))
def __init__(self, basedir, config, dry_run=False):
    '''Create a MigrationApplier from the given base directory and
    configuration. The base directory must exist, and the configuration
    must contain ample information for the adapter to connect to the
    database.
    '''
    self.dry_run = dry_run
    self.config = config
    self.basedir = basedir
    self.initializePlugin()
    self.logger = getLogger('MigrationApplier')
def __init__(self, scenario_name, host_port, nfpa_class):
    '''
    This class initializes a bottle python webserver on the given host_port,
    which is passed as host:port!
    scenario_name String - the name for identifying the scenario
    host_port     String - looks like localhost:8000
    nfpa_class    NFPA   - a pointer to the main class to be able to access
                           its startPktgenMeasurements function
    '''
    host_port_string_input = host_port  # used only for printing out below
    host_port = host_port.split(":")
    self.host = host_port[0]
    self.port = host_port[1]

    # read config
    tmp_cfg = rwcf.readConfigFile("nfpa.cfg")
    # check whether it was successful
    if tmp_cfg[0] == True:
        self.config = tmp_cfg[1]
    else:
        print(tmp_cfg[1])
        exit(-1)
    self.config_comment = rwcf.getConfigComments()

    # instantiate logger
    self.log = l.getLogger(self.__class__.__name__,
                           self.config['LOG_LEVEL'],
                           self.config['app_start_date'],
                           self.config['LOG_PATH'])

    self.log.info("### Measurement scenario '" + scenario_name + "' has been "
                  "initiated with Web-GUI ###")
    self.log.info("NFPA Web interface can be reached under: %s/nfpa"
                  % host_port_string_input)
    # print("ETL: %s" % self.config['ETL'])

    # append scenario name to self.config dictionary for later usage
    self.config['scenario_name'] = scenario_name
    self.nfpa_class = nfpa_class
    # print("in config: %s" % self.config['scenario_name'])

    self._app = Bottle()
    self._route()
    # self.note_pic = self._serve_pictures('note.png')
    self.start()
def __init__(self, max_vm, duts):
    self.max_vm = max_vm
    self.duts = duts
    self.pool = threadpool.ThreadPool(max_vm)
    self.pool_result = [dict() for _ in duts]
    self._pool_requests = list()
    self._pool_executors = dict()
    self.logger = getLogger("multiple_vm")
    self.logger.info("Created MultipleVM instance with %d DUTs and %d VMs"
                     % (len(duts), max_vm))
def register_device_callback(register_result, iothub_uri, device_id, user_context):
    global kill_received
    global iotHubClient
    global IOTHUB_URI
    global SECURITY_DEVICE_TYPE
    global PROTOCOL
    global IOTHUB_DID

    log = logger.getLogger()
    log.debug("Register device callback:")
    log.debug("    register_result = %s" % register_result)
    log.debug("    iothub_uri = %s" % iothub_uri)
    log.debug("    user_context = %s" % user_context)

    IOTHUB_URI = iothub_uri
    IOTHUB_DID = device_id

    if iothub_uri:
        log.debug("Device successfully registered!")
        iotHubClient = IotHubClient(
            IOTHUB_URI, IOTHUB_DID,
            False if SECURITY_DEVICE_TYPE == ProvisioningSecurityDeviceType.X509 else True)

        # register a method for direct method execution
        # called with:
        # iothub-explorer device-method <device-name> rainbow '{"timeInSec":10}' 3600
        iotHubClient.registerMethod("rainbow", registeredMethods.directMethod)

        # register a method for cloud to device (C2D) execution
        # called with:
        # iothub-explorer send <device-name> '{"methodName":"message", "payload":{"text":"Hello World!!!", "color":[255,0,0]}}'
        iotHubClient.registerMethod("message", registeredMethods.cloudMessage)

        # register callbacks for desired properties expected
        iotHubClient.registerDesiredProperty("fanspeed", registeredMethods.fanSpeedDesiredChange)
        iotHubClient.registerDesiredProperty("setvoltage", registeredMethods.voltageDesiredChange)
        iotHubClient.registerDesiredProperty("setcurrent", registeredMethods.currentDesiredChange)
        iotHubClient.registerDesiredProperty("activateir", registeredMethods.irOnDesiredChange)

        while not kill_received:
            log.debug("reading sensors\n")
            globals.display.increment(1)
            sensorData = readSensors()
            log.debug("debug display\n")
            debugDisplay(sensorData)
            log.debug("send data\n")
            sendDataToHub(sensorData)
            log.debug("display show\n")
            globals.display.show()
            time.sleep(5)
    else:
        log.debug("Device registration failed!")

    log.debug("done..")
def __init__(self,
             _worker_index=0,
             _check_period=60.0,
             _init_base_time_step=20.0,
             _max_steps=1000000,
             _batch_size=128,
             _class_num=10,  # must be given
             _base_dir=None,
             _host='localhost',
             _port_base=14200,
             _s=None,
             logger_name=None,
             _eval_step=10):
    self.No = int(_worker_index)
    self.check_period = float(_check_period)
    self.base_time_step = float(_init_base_time_step)  # initial, commit update per 20s
    self.max_steps = int(_max_steps)
    self.batch_size = int(_batch_size)
    self.class_num = int(_class_num)
    #
    self.base_dir = _base_dir
    self.host = _host
    self.port_base = int(_port_base)
    self.eval_step = _eval_step
    self.logger = logger.getLogger(logger_name)

    ##########################################################
    self.commit_cnt = 0  # record the total commit number
    self.class_cnt = [0 for _ in xrange(self.class_num)]

    ## for prediction
    self.predict_cnt = [0 for _ in xrange(self.class_num)]
    self.predict_rst = [0 for _ in xrange(self.class_num)]
    self.eval_rst = [0.0 for _ in xrange(self.class_num + 1)]  # last elem is the overall accuracy

    # log for the worker
    self.f_log = open(os.path.join(self.base_dir + 'wk_%d_usp.txt' % (self.No)), 'w')
    self.f_pre = open(os.path.join(self.base_dir + 'wk_%d_usp_pred.txt' % (self.No)), 'w')

    # store the parameters
    self.parameter = []  # a list of parameters, parameters are np.array
    self.para_shape = []
    self.commit_overhead = 0
def __init__(self, config_file):
    '''
    Constructor
    '''
    # check the path to the config_file
    # dictionary for storing configuration parameters read from config file
    self._config = {}

    # read config
    tmp_cfg = rwcf.readConfigFile(config_file)
    # check whether it was successful
    if tmp_cfg[0] == True:
        self._config = tmp_cfg[1]
    else:
        print(tmp_cfg[1])
        exit(-1)

    # Create a list of dictionary indexes for easier iterating through data.
    # Actually, these are the measured data units/names stored and placed in the
    # gnuplot file as well, therefore iterating through this dictionary eases
    # the code via not accessing the fields explicitly.
    # sp - sent pps, rb - recv bps, etc.
    self._config['header_uni'] = ['sent_pps', 'recv_pps', 'miss_pps',
                                  'sent_bps', 'recv_bps', 'diff_bps']
    self._config['header_bi'] = ['sent_pps_bidir', 'recv_pps_bidir', 'miss_pps_bidir',
                                 'sent_bps_bidir', 'recv_bps_bidir', 'diff_bps_bidir']
    self._config['helper_header'] = ['min', 'avg', 'max']

    self.log = l.getLogger(self.__class__.__name__,
                           self._config['LOG_LEVEL'],
                           self._config['app_start_date'],
                           self._config['LOG_PATH'])

    # create an instance of database helper and store it in config dictionary
    self._config["dbhelper"] = SQLiteDatabaseAdapter(self._config)

    # parse config params
    configSuccess = self.checkConfig()
    if configSuccess == -1:
        # config check failed; bail out early (__init__ must return None)
        return

    self.calculateTimeLeft()
    self.createResultsDir()
    self.assemblePktgenCommand()
    self.createSymlinksForLuaScripts()
def __init__(self):
    self.logger = logger.getLogger("*Monitor*")
    self.rd = redis.Redis(host=settings.REDIS_HOST, port=settings.REDIS_PORT)

    # initial state and values
    for flow in self.get_all_flows():
        if 'scheduled' not in flow:
            flow['scheduled'] = 0
        flow['state'] = 'stopped'
        flow['last_scheduled'] = flow['scheduled']
        flow['noact'] = 0
def scan(self, interface='wlan0'):
    zlog = logger.getLogger()
    try:
        scantext = scan(interface)
        if scantext:
            self.cells = parse(scantext)
            zlog.logger.info(self.cells)
            return True
        else:
            return False
    except Exception as e:
        zlog.logger.error("Exception %s in WiFi scanner" % str(e))
        return False
def send_confirmation_callback(self, message, result, user_context):
    log = logger.getLogger()
    log.info("Confirmation[{0}] received for message with result = {1}".format(
        user_context, result))
    map_properties = message.properties()
    log.info("    message_id: {0}".format(message.message_id))
    log.info("    correlation_id: {0}".format(message.correlation_id))
    key_value_pair = map_properties.get_internals()
    log.info("    Properties: {0}".format(key_value_pair))
    self.sendCallbackCount += 1
    log.info("    Total calls confirmed: {0}\n".format(self.sendCallbackCount))
def main(ib: IB):
    num_disconnects = 0
    try:
        logger.getLogger().info("Connecting...")
        ib.connect(config.HOST, config.PORT, clientId=config.CLIENTID)
        ib.reqMarketDataType(config.DATATYPE.value)
    except NameError:
        # got this block from https://groups.io/g/insync/message/4045
        num_disconnects += 1
        print(datetime.datetime.now(), 'Connection error exception', num_disconnects)
        #self.ib.cancelHistoricalData(bars)
        logger.getLogger().info('Sleeping for 10sec...')
        ib.disconnect()
        ib.sleep(10)
        ib.connect(config.HOST, config.PORT, clientId=config.CLIENTID)
    # except OSError:
    #     log.info("main try except OS error > Connection Failed.")
    #     sys_exit()

    app = App(ib)
    app.run()
def __init__(self, name, conn):
    # class BufferManager(threading.Thread):
    #     def __init__(self, name):
    super(BufferManager, self).__init__()
    self.conn = conn
    self.daemon = True
    # threading.Thread.__init__(self)
    self.pool = redis.ConnectionPool(host='localhost', port=6379)
    self.rds = redis.Redis(connection_pool=self.pool)
    self.name = name
    self.pkg_to_save = []
    logger.init('./buf.log', logging.INFO)
    self.logger = logger.getLogger('bm', logging.INFO)
def save(world, player):
    log = l.getLogger('savegame')
    log.debug('saving game ...')
    try:
        saveFile = gzip.open(os.path.join(
            EC.EngineConfig.Instance().getPath(), Savegame.NAME), 'w')
        log.debug('writing data')
        saveFile.write("%s\n" % (player.toJson(),))
        for coord in world.getBlockPositions():
            saveFile.write("%s\n" % (world.getBlock(coord).toJson(),))
        saveFile.close()
    except Exception as e:
        log.error('saving failed: %s' % (str(e),))
def __init__(self, source_name, error_context):
    self.source_name = source_name
    self.error_context = error_context
    self.current_location = Location(self.source_name, 0, 0, 0)
    self.data = ''

    # Create logger for lexer
    self.log = logger.getLogger('lexer-%d' % logger.sysid(self))

    # Create ply lexer object
    log_wrapper = logger.PlyLoggerWrapping(self.log)
    self.lexer_object = lex.lex(module=self,
                                debuglog=log_wrapper,
                                errorlog=log_wrapper,
                                debug=1)
    self.log.info('Created lexer; class ID=lexer-class-%d' % logger.sysid(self.__class__))
def __init__(self, crb, serializer, name):
    self.crb = crb
    self.skip_setup = False
    self.serializer = serializer
    self.ports_info = None
    self.sessions = []
    self.name = name

    self.logger = getLogger(name)
    self.session = SSHConnection(self.get_ip_address(), name, self.get_password())
    self.session.init_log(self.logger)
    self.alt_session = SSHConnection(self.get_ip_address(), name + '_alt',
                                     self.get_password())
    self.alt_session.init_log(self.logger)
def dts_log_testsuite(test_suite, log_handler, test_classname):
    """
    Change to SUITE self logger handler.
    """
    test_suite.logger = getLogger(test_classname)
    test_suite.logger.config_suite(test_classname)
    log_handler.config_suite(test_classname, 'dts')
    dut.logger.config_suite(test_classname, 'dut')
    tester.logger.config_suite(test_classname, 'tester')
    try:
        if tester.it_uses_external_generator():
            getattr(tester, 'ixia_packet_gen')
            tester.ixia_packet_gen.logger.config_suite(test_classname, 'ixia')
    except Exception as ex:
        pass
def __init__(self, debug=False):
    self._log = logger.getLogger('DLD')
    self._log.debug('Initializing data downloader. Pandas version={0}, ibpy version: {1}'.format(
        pandas.__version__, ib.version))
    self.tws = ibConnection()
    self._dataHandler = _HistDataHandler(self.tws)

    if debug:
        self.tws.registerAll(self._debugHandler)
        self.tws.unregister(self._debugHandler, message.HistoricalData)

    self._log.debug('Connecting to tws')
    self.tws.connect()

    self._timeKeeper = TimeKeeper()  # keep track of past requests
    self._reqId = 1  # current request id
def __init__(self, crb, serializer):
    super(Dut, self).__init__(crb, serializer)
    self.NAME = 'dut'
    self.logger = getLogger(self.NAME)
    self.session = SSHConnection(self.get_ip_address(), self.NAME,
                                 self.get_password())
    self.session.init_log(self.logger)
    self.alt_session = SSHConnection(self.get_ip_address(), self.NAME + '_alt',
                                     self.get_password())
    self.alt_session.init_log(self.logger)

    self.number_of_cores = 0
    self.tester = None
    self.cores = []
    self.architecture = None
    self.ports_info = None
    self.conf = UserConf()
    self.ports_map = []
def __init__(self, config):
    '''
    Constructor
    :param config: dictionary of the configuration from nfpa.cfg
    '''
    self.config = config
    if self.config['email_service'].lower() != "true":
        # email service disabled; bail out early (__init__ must return None)
        return

    self.log = l.getLogger(self.__class__.__name__,
                           self.config['LOG_LEVEL'],
                           self.config['app_start_date'],
                           self.config['LOG_PATH'])

    # get current timestamp
    self.st = df.getDateFormat(self.config['app_start_date'])
    self.SUBJECT = "[NFPA-SERVICE] "