def create_logger_alias(self, parent, name):
    """Ensure /services/logger has an Alias child 'name' pointing at
    this trend's log node; raise ENameInUse if the name is taken by
    something that is not ours."""
    logger_service = as_internal_node('/services/logger')
    if logger_service.has_child(name):
        # get_child returns the actual child; as_node would follow an alias.
        trend_alias = logger_service.get_child(name)
        taken = not isinstance(trend_alias, Alias)
        if not taken:
            # An alias only counts as "ours" if it targets a node under self.
            taken |= as_internal_node(
                os.path.dirname(trend_alias.node_url)) is not self
        if taken:
            raise ENameInUse(trend_alias.as_node_url())
    else:
        trend_alias = Alias()
    parent_url = as_node(parent).as_node_url()
    quoted = urllib.quote(name, '')
    trend_alias.configure({
        'name': name,
        'parent': logger_service,
        'node_url': os.path.join(parent_url, quoted, quoted),
    })
    return
def setUp(self):
    """Create a listening TCP socket that stands in for the "eWebConnect"
    server, then wire an EWebConnectAlarmClient at it and start the tree."""
    DefaultTestFixture.setUp(self)
    listener = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    listener.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
    # Bind to any available port on localhost.
    listener.bind(('localhost', 0))
    listener.listen(5)
    host, port = listener.getsockname()
    ##
    # Socket ready to accept "eWebConnect" Alarm messages.
    self.server_socket = listener
    ##
    # The "eWebConnect" host.  NOTE(review): 'sever_address' looks like a
    # typo for 'server_address'; preserved in case other code reads it.
    self.sever_address = host
    ##
    # The "eWebConnect" port.
    self.server_port = port
    self.new_node_tree()
    root = as_internal_node('/')
    self.configure({'name': 'Test Case', 'parent': '/'})
    client = EWebConnectAlarmClient()
    client.configure({'name': 'eWebConnect',
                      'parent': self,
                      'host': 'localhost',
                      'port': self.server_port})
    as_internal_node('/').start()
    return
def setUp(self):
    """Build an XMLRPC_Handler with the default and RNA deployed objects
    and open an mpxadmin session against a fresh node tree."""
    mpx.service.session.SessionManager.PASSWD_FILE = os.path.join(
        os.path.dirname(__file__), 'passwd.test')
    DefaultTestFixture.setUp(self)
    self.config = {}
    deployed = [
        {'alias': 'default',
         'class': 'mpx.lib.xmlrpc.XMLRPC_DefaultObject',
         'lifetime': 'Request'},
        {'alias': 'rna_xmlrpc',
         'class': 'mpx.lib.xmlrpc.rna_xmlrpc.RNA_XMLRPC_Handler',
         'lifetime': 'Runtime'},
    ]
    handler_config = {'name': 'XMLRPC_Handler',
                      'parent': None,
                      'module': '',
                      'deployed_objects': deployed}
    self.handler = XMLRPC_Handler()
    self.handler.configure(handler_config)
    root = as_internal_node('/')
    self.new_node_tree()
    as_internal_node('/').start()
    self._session = as_internal_node(
        '/services/session_manager').create('mpxadmin', 'mpxadmin')
    return
def test_valid_xml(self):
    """Format bogus log data with the metermail formatter, write it to a
    temp file, and verify it parses cleanly with xmllint.

    Fixes: the os.popen3 pipe handles were never closed (fd leak per
    run) and the output file was not closed if an intermediate call
    raised; the unused 'out'/'output' locals are gone.
    """
    self.new_column_a_log()
    tmp_file = self.temp_file_name()
    as_internal_node('/').start()
    stream = as_internal_node(
        '/services/logger/bogus_log/exporters/periodic_exporter/'
        'metermail_formatter').format(self.data)
    f = open(tmp_file, 'w')
    try:
        # Drain the formatter stream in 1K chunks straight into the file.
        data = stream.read(1024)
        while data:
            f.write(data)
            data = stream.read(1024)
    finally:
        f.close()
    command = 'xmllint ' + str(tmp_file)
    stdin, stdout, stderr = os.popen3(command)
    try:
        err = stderr.readlines()
    finally:
        # Close all three pipes so the child can be reaped and no
        # descriptors are leaked.
        stdin.close()
        stdout.close()
        stderr.close()
    if err:
        error = ''
        for e in err:
            error += e + '\n'
        self.fail('File is not a valid XML file:\n' + error)
    return
def setUp(self):
    """Build an XMLRPC_Handler with the default and RNA deployed objects
    and open an mpxadmin session against a fresh node tree."""
    mpx.service.session.SessionManager.PASSWD_FILE = os.path.join(
        os.path.dirname(__file__), 'passwd.test')
    DefaultTestFixture.setUp(self)
    self.config = {}
    deployed = [
        {'alias': 'default',
         'class': 'mpx.lib.xmlrpc.XMLRPC_DefaultObject',
         'lifetime': 'Request'},
        {'alias': 'rna_xmlrpc',
         'class': 'mpx.lib.xmlrpc.rna_xmlrpc.RNA_XMLRPC_Handler',
         'lifetime': 'Runtime'},
    ]
    handler_config = {'name': 'XMLRPC_Handler',
                      'parent': None,
                      'module': '',
                      'deployed_objects': deployed}
    self.handler = XMLRPC_Handler()
    self.handler.configure(handler_config)
    root = as_internal_node('/')
    self.new_node_tree()
    as_internal_node('/').start()
    self._session = as_internal_node(
        '/services/session_manager').create('mpxadmin', 'mpxadmin')
    return
def setUp(self):
    """Create a listening TCP socket that stands in for the "eWebConnect"
    server, then wire an EWebConnectAlarmClient at it and start the tree."""
    DefaultTestFixture.setUp(self)
    listener = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    listener.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
    # Bind to any available port on localhost.
    listener.bind(('localhost', 0))
    listener.listen(5)
    host, port = listener.getsockname()
    ##
    # Socket ready to accept "eWebConnect" Alarm messages.
    self.server_socket = listener
    ##
    # The "eWebConnect" host.  NOTE(review): 'sever_address' looks like a
    # typo for 'server_address'; preserved in case other code reads it.
    self.sever_address = host
    ##
    # The "eWebConnect" port.
    self.server_port = port
    self.new_node_tree()
    root = as_internal_node('/')
    self.configure({'name': 'Test Case', 'parent': '/'})
    client = EWebConnectAlarmClient()
    client.configure({'name': 'eWebConnect',
                      'parent': self,
                      'host': 'localhost',
                      'port': self.server_port})
    as_internal_node('/').start()
    return
def test_valid_xml(self):
    """Format bogus log data with the metermail formatter, write it to a
    temp file, and verify it parses cleanly with xmllint.

    Fixes: the os.popen3 pipe handles were never closed (fd leak per
    run) and the output file was not closed if an intermediate call
    raised; the unused 'out'/'output' locals are gone.
    """
    self.new_column_a_log()
    tmp_file = self.temp_file_name()
    as_internal_node('/').start()
    stream = as_internal_node(
        '/services/logger/bogus_log/exporters/periodic_exporter/'
        'metermail_formatter').format(self.data)
    f = open(tmp_file, 'w')
    try:
        # Drain the formatter stream in 1K chunks straight into the file.
        data = stream.read(1024)
        while data:
            f.write(data)
            data = stream.read(1024)
    finally:
        f.close()
    command = 'xmllint ' + str(tmp_file)
    stdin, stdout, stderr = os.popen3(command)
    try:
        err = stderr.readlines()
    finally:
        # Close all three pipes so the child can be reaped and no
        # descriptors are leaked.
        stdin.close()
        stdout.close()
        stderr.close()
    if err:
        error = ''
        for e in err:
            error += e + '\n'
        self.fail('File is not a valid XML file:\n' + error)
    return
def test_ElementalNodeType(self):
    """Round-trip nodes through the EDT encode/decode helpers."""
    dumped = ElementalNodeType(as_internal_node('/')).edt_dump()
    assert dumped == {'edt__typ': 'node', 'path': '/'}
    decoded = edt_decode({'edt__typ': 'node',
                          'path': '/services/time/local'})
    assert isinstance(decoded, ElementalNodeType)
    assert decoded.edt__typ == 'node'
    assert decoded.path == '/services/time/local'
    expected = {'edt__typ': 'node', 'path': '/'}
    assert edt_encode(as_internal_node('/')) == expected
    assert edt_encode(as_node('/')) == expected
    return
def test_ElementalNodeType(self):
    """Round-trip nodes through the EDT encode/decode helpers."""
    dumped = ElementalNodeType(as_internal_node('/')).edt_dump()
    assert dumped == {'edt__typ': 'node', 'path': '/'}
    decoded = edt_decode({'edt__typ': 'node',
                          'path': '/services/time/local'})
    assert isinstance(decoded, ElementalNodeType)
    assert decoded.edt__typ == 'node'
    assert decoded.path == '/services/time/local'
    expected = {'edt__typ': 'node', 'path': '/'}
    assert edt_encode(as_internal_node('/')) == expected
    assert edt_encode(as_node('/')) == expected
    return
def destroy_logger_alias(self):
    """Remove this trend's Alias from /services/logger; dangling aliases
    (whose target no longer resolves) are pruned as well."""
    logger_service = as_internal_node('/services/logger')
    if not logger_service.has_child(self.name):
        return
    # get_child returns the actual child; as_node would follow an alias.
    candidate = logger_service.get_child(self.name)
    if not isinstance(candidate, Alias):
        return
    try:
        # Only prune the alias if it points back at one of our logs.
        if as_internal_node(os.path.dirname(candidate.node_url)) is self:
            candidate.prune()
    except ENoSuchName:
        # Target is gone: the alias is dangling, prune it anyway.
        candidate.prune()
    return
def destroy_logger_alias(self):
    """Remove this trend's Alias from /services/logger; dangling aliases
    (whose target no longer resolves) are pruned as well."""
    logger_service = as_internal_node('/services/logger')
    if not logger_service.has_child(self.name):
        return
    # get_child returns the actual child; as_node would follow an alias.
    candidate = logger_service.get_child(self.name)
    if not isinstance(candidate, Alias):
        return
    try:
        # Only prune the alias if it points back at one of our logs.
        if as_internal_node(os.path.dirname(candidate.node_url)) is self:
            candidate.prune()
    except ENoSuchName:
        # Target is gone: the alias is dangling, prune it anyway.
        candidate.prune()
    return
def configure(self, config):
    """Configure the Trend Manager: display name, the logger service URL,
    and whether the services tree is secured."""
    self.setattr('name', config.get('name', 'Trend Manager'))
    logger_url = config.get('logger_url', '/services/logger')
    self.setattr('logger_url', logger_url)
    self.secured = as_internal_node("/services").secured
    super(TrendManager, self).configure(config)
    return
def read_object(self): if self.context is None: nodeurl = self.statedict['nodeurl'] print '%s read_object for %s' % (self, nodeurl) try: context = as_internal_node(nodeurl) if isinstance(context, DeferredObject): raise KeyError(nodeurl) self.context = context except KeyError: print '\tcreating node' super(PickleableConfigurableNode, self).__setstate__(self.statedict) else: print '\tnode already existed, setting up' contextstate = self.statedict['state'] # Ugly inefficient hack to work with ReloadableSingleton, # which wraps the ROOT node and delegates getattr and # setattr to the wrapped node. This, of course, fails # if the instance's __dict__ is used directly to modify # the state. for name, value in contextstate.items(): setattr(self.context, name, value) nodeconfig = self.statedict['nodeconfig'] print '\tcofigure node with %s' % nodeconfig try: self.context.configure(nodeconfig) except: msglog.exception() return self.context
def setUp(self):
    """Build a tree of Fast/Slow/Error/Batch nodes and configure the
    subscription manager with tiny pools for testing."""
    DefaultTestFixture.setUp(self)
    self.__event_updated_values = {}
    self.new_node_tree()
    root = as_internal_node('/')
    self._cov_counter = 0
    GetException().configure({'name': 'exception', 'parent': '/services'})
    SUBSCRIPTION_MANAGER.configure({
        'debug': 0,
        '_normal_pool_size': 2,
        '_slow_pool_size': 2,
        '_prime_pool_size': 2,
        '_minimum_poll_interval': 0.001,
        '_slow_poll_threshold': 0.500,
    })
    for i in range(10):
        # One node of each flavor per index, named <Flavor>-NNN.
        for factory, label in ((FastNode, 'FastNode'),
                               (SlowNode, 'SlowNode'),
                               (ErrorNode, 'ErrorNode')):
            child = factory()
            child.configure({'parent': root,
                             'name': '%s-%03d' % (label, i)})
        batch = BatchNode(i & 1)
        batch.configure({'parent': root, 'name': 'BatchNode-%03d' % i})
    root.start()
    return
def _get_reference(self):
    """Lazily resolve self._reference from a node URL to a node.

    Returns the resolved node, or None if resolution fails.
    Fix: the bare 'except:' (which also swallowed SystemExit and
    KeyboardInterrupt) is narrowed to 'except Exception'.
    """
    if not is_node(self._reference):
        try:
            self._reference = as_internal_node(self._reference)
        except Exception:
            # Resolution failed (e.g. no such node); treat the
            # reference as unusable rather than propagating.
            self._reference = None
    return self._reference
def _local_setup(self):
    """Create a fresh node tree and start the shared garbage collector
    with the module-level debug flag applied."""
    self.new_node_tree()
    self.root = as_internal_node('/')
    self._dm = GARBAGE_COLLECTOR
    self._dm.debug = debug
    self._dm.start()
    return
def interrogate_nodes(root, get_state=1): from mpx.lib.node import as_node, as_internal_node #print 'enter interrogate_node' if type(root) == types.StringType: root = as_internal_node(root) it = Iterator(root, 0) node_stack = [] node_interrogation = [] #COUNTER = 0 while it.has_more(): node = it.get_next_node() #while (len(node_stack) and node_stack[len(node_stack) - 1] is not node.parent): #node_stack.pop() #node_stack.append(node) config = None state = '' try: config = node.configuration() for k in config.keys(): value = config[k] if type(value) == types.StringType: config[k] = _encode_xml(value) state = current_state(node) except Exception, e: config = {} config['__state__'] = 'Error' ## Should report error? config['__state__'] = str(state) node_interrogation.append(config)
def setUp(self):
    """Build a tree of Fast/Slow/Error/Batch nodes and configure the
    subscription manager with tiny pools for testing."""
    DefaultTestFixture.setUp(self)
    self.__event_updated_values = {}
    self.new_node_tree()
    root = as_internal_node('/')
    self._cov_counter = 0
    GetException().configure({'name': 'exception', 'parent': '/services'})
    SUBSCRIPTION_MANAGER.configure({
        'debug': 0,
        '_normal_pool_size': 2,
        '_slow_pool_size': 2,
        '_prime_pool_size': 2,
        '_minimum_poll_interval': 0.001,
        '_slow_poll_threshold': 0.500,
    })
    for i in range(10):
        # One node of each flavor per index, named <Flavor>-NNN.
        for factory, label in ((FastNode, 'FastNode'),
                               (SlowNode, 'SlowNode'),
                               (ErrorNode, 'ErrorNode')):
            child = factory()
            child.configure({'parent': root,
                             'name': '%s-%03d' % (label, i)})
        batch = BatchNode(i & 1)
        batch.configure({'parent': root, 'name': 'BatchNode-%03d' % i})
    root.start()
    return
def interrogate_nodes(root, get_state=1): from mpx.lib.node import as_node,as_internal_node #print 'enter interrogate_node' if type(root) == types.StringType: root = as_internal_node(root) it = Iterator(root, 0) node_stack = [] node_interrogation = [] #COUNTER = 0 while it.has_more(): node = it.get_next_node() #while (len(node_stack) and node_stack[len(node_stack) - 1] is not node.parent): #node_stack.pop() #node_stack.append(node) config = None state='' try: config = node.configuration() for k in config.keys(): value = config[k] if type(value) == types.StringType: config[k] = _encode_xml(value) state = current_state(node) except Exception, e: config = {} config['__state__'] = 'Error' ## Should report error? config['__state__'] = str(state) node_interrogation.append(config)
def configure(self, config):
    """Configure the schedule configurator: URL path, security flag, and
    which security manager node to consult."""
    self.setattr('path', config.get('path', '/scheduleconfig'))
    self.secured = as_boolean(as_internal_node("/services").secured)
    manager_url = config.get('security_manager',
                             '/services/Security Manager')
    self.setattr('security_manager', manager_url)
    super(ScheduleConfigurator, self).configure(config)
def test_1_empty(self):
    """Configuring from an empty XML file should either succeed or fail
    with a SAX parse error; anything else is re-raised."""
    xml_path = os.path.join(properties.ROOT, 'mpx/system/_test_1_empty.xml')
    system.configure(xml_path)
    exception = as_internal_node('/').exception
    if exception is not None and not isinstance(exception, SAXException):
        raise exception
    return
def _inform_garbage_collector(list):
    """Best-effort hand-off of a failure list to the garbage-collector
    service; problems are logged at INFO level, never raised."""
    # NOTE: the parameter name shadows the builtin 'list'; kept for
    # backward compatibility with keyword callers.
    try:
        collector = as_internal_node('/services/garbage_collector')
        collector.set_faillist(list)
    except:
        msglog.exception(msglog.types.INFO)
    return
def setUp(self):
    """Create input/output SimpleValue nodes plus an alias to each, then
    seed input=1 and output=0."""
    DefaultTestFixture.setUp(self)
    self.root = as_internal_node('/')
    # Containers for the two values.
    self.input = CompositeNode()
    self.output = CompositeNode()
    self.input.configure({'parent': self.root, 'name': 'input'})
    self.output.configure({'parent': self.root, 'name': 'output'})
    # The values themselves.
    self.input_value = SimpleValue()
    self.output_value = SimpleValue()
    self.input_value.configure({'parent': self.input, 'name': 'value'})
    self.output_value.configure({'parent': self.output, 'name': 'value'})
    # Aliases pointing at each value.
    self.aliases = Aliases()
    self.aliases.configure({'parent': self.root, 'name': 'aliases'})
    self.alias_input = Alias()
    self.alias_input.configure({'parent': self.aliases,
                                'name': 'input',
                                'node_url': '/input/value'})
    self.alias_output = Alias()
    self.alias_output.configure({'parent': self.aliases,
                                 'name': 'output',
                                 'node_url': '/output/value'})
    self.input_value.set(1)
    self.output_value.set(0)
    return
def setUp(self):
    """Build a tree of Fast/Slow/Error/Batch nodes and configure the
    subscription manager with tiny pools for testing."""
    DefaultTestFixture.setUp(self)
    self.__event_updated_values = {}
    self.new_node_tree()
    root = as_internal_node("/")
    self._cov_counter = 0
    GetException().configure({"name": "exception", "parent": "/services"})
    SUBSCRIPTION_MANAGER.configure({
        "debug": 0,
        "_normal_pool_size": 2,
        "_slow_pool_size": 2,
        "_prime_pool_size": 2,
        "_minimum_poll_interval": 0.001,
        "_slow_poll_threshold": 0.500,
    })
    for i in range(10):
        # One node of each flavor per index, named <Flavor>-NNN.
        for factory, label in ((FastNode, "FastNode"),
                               (SlowNode, "SlowNode"),
                               (ErrorNode, "ErrorNode")):
            child = factory()
            child.configure({"parent": root,
                             "name": "%s-%03d" % (label, i)})
        batch = BatchNode(i & 1)
        batch.configure({"parent": root, "name": "BatchNode-%03d" % i})
    root.start()
    return
def read_object(self): if self.context is None: nodeurl = self.statedict['nodeurl'] print '%s read_object for %s' % (self, nodeurl) try: context = as_internal_node(nodeurl) if isinstance(context, DeferredObject): raise KeyError(nodeurl) self.context = context except KeyError: print '\tcreating node' super(PickleableConfigurableNode, self).__setstate__(self.statedict) else: print '\tnode already existed, setting up' contextstate = self.statedict['state'] # Ugly inefficient hack to work with ReloadableSingleton, # which wraps the ROOT node and delegates getattr and # setattr to the wrapped node. This, of course, fails # if the instance's __dict__ is used directly to modify # the state. for name,value in contextstate.items(): setattr(self.context, name, value) nodeconfig = self.statedict['nodeconfig'] print '\tcofigure node with %s' % nodeconfig try: self.context.configure(nodeconfig) except: msglog.exception() return self.context
def new_column_a_log(self):
    """Build the bogus logger tree: a log with timestamp and column-A
    columns, a MeterMail column decorator on column-A, and a periodic
    exporter carrying a MeterMail formatter."""
    self.new_node_tree()
    logger = BogusLogger()
    logger.configure({'name': 'logger',
                      'parent': as_internal_node('/services')})
    log = BogusLog()
    log.configure({'name': 'bogus_log', 'parent': logger})
    columns = BogusColumns()
    columns.configure({'name': 'columns', 'parent': log})
    timestamp_column = BogusColumn()
    # NOTE(review): 'timstamp' looks like a typo for 'timestamp';
    # preserved because tests may rely on the existing node name.
    timestamp_column.configure({'name': 'timstamp', 'parent': columns})
    column_a = BogusColumn()
    column_a.configure({'name': 'column-A', 'parent': columns})
    decorator = MeterMailColumnDecorator()
    decorator.configure({'name': 'metermail_column_decorator',
                         'parent': column_a,
                         'mmafmt_channel_id': 'column-A',
                         'mmafmt_channel_label': 'kW a'})
    exporters = BogusExporters()
    exporters.configure({'name': 'exporters', 'parent': log})
    exporter = BogusPeriodicExporter()
    exporter.configure({'name': 'periodic_exporter', 'parent': exporters})
    formatter = MeterMailFormatter()
    formatter.configure({'name': 'metermail_formatter',
                         'parent': exporter,
                         'device_name': 'device_name',
                         'data_recorder_id': 'data_recorder_id',
                         'data_recorder_label': 'data_recorder_label'})
    return
def test_1_empty(self):
    """Configuring from an empty XML file should either succeed or fail
    with a SAX parse error; anything else is re-raised."""
    xml_path = os.path.join(properties.ROOT, 'mpx/system/_test_1_empty.xml')
    system.configure(xml_path)
    exception = as_internal_node('/').exception
    if exception is not None and not isinstance(exception, SAXException):
        raise exception
    return
def configure(self, config):
    """Configure the node-browser request handler.

    The request_path tells the http_server which url requests should be
    sent to this handler; it can be a regular expression as defined in
    the documentation for the python re module.
    """
    set_attribute(self, 'request_path', '/nodebrowser', config)
    self.secured = as_internal_node("/services").secured
    RequestHandler.configure(self, config)
def configure(self, config):
    """Configure the schedule configurator: URL path, security flag, and
    which security manager node to consult."""
    self.setattr('path', config.get('path', '/scheduleconfig'))
    self.secured = as_boolean(as_internal_node("/services").secured)
    manager_url = config.get('security_manager',
                             '/services/Security Manager')
    self.setattr('security_manager', manager_url)
    super(ScheduleConfigurator, self).configure(config)
def test_dumps_node(self):
    """The root node marshals to an XML-RPC struct with 'path' and
    'edt__typ' members."""
    expected = (
        "<params><param><value><struct><member>"
        "<name>path</name><value><string>/</string></value></member>"
        "<member><name>edt__typ</name><value><string>node</string></value>"
        "</member></struct></value></param></params>")
    self.assert_eq(dumps((as_internal_node('/'),)), expected)
    return
def configure(self, config):
    """Configure the backup request handler.

    The request_path tells the http_server which url requests should be
    sent to this handler; it can be a regular expression as defined in
    the documentation for the python re module.
    """
    set_attribute(self, 'request_path', '/backup', config)
    self.secured = as_internal_node("/services").secured
    RequestHandler.configure(self, config)
    return
def configure(filename=properties.CONFIGURATION_FILE): try: root = _configure(filename) except Exception, e: import traceback traceback.print_exc() root = as_internal_node('/') root.exception = e msglog.exception()
def __init__(self, source_node, timeout=960):
    """Event consumer bound to 'source_node'.

    timeout: number of seconds to maintain the subscription.
    """
    EventConsumerAbstract.__init__(self)
    self.__node = as_internal_node(source_node)
    self.__cond = Condition()
    self.__event = None
    # Scheduled action used to unsubscribe the point later.
    self.__sched = None
    self.__timeout = timeout
    self._event_received = False
    return
def configure(self, config):
    """Configure the trend-manager HTTP handler: URL path, display name,
    the (deferred) Trend Manager node, and the security flag."""
    self.setattr('path', config.get('path', '/trendmanager'))
    self.setattr('name', config.get('name', 'Trend Manager Handler'))
    manager_url = config.get('manager', '/services/Trend Manager')
    self.setattr('manager', as_deferred_node(manager_url))
    self.secured = as_internal_node("/services").secured
    super(TrendManagerHandler, self).configure(config)
    return
def start(self):
    """Idempotent start: bind to the periodic log of the same name under
    /services/logger and remember its URL."""
    if self.__running:
        return
    super(PeriodicLogTrendAdapter, self).start()
    logger = as_internal_node('/services/logger')
    self.periodic_log = logger.get_child(self.name)
    self.log_url = self.periodic_log.as_node_url()
    self.__running = True
    return
def test_dumps_node(self):
    """The root node marshals to an XML-RPC struct with 'path' and
    'edt__typ' members."""
    expected = (
        "<params><param><value><struct><member>"
        "<name>path</name><value><string>/</string></value></member>"
        "<member><name>edt__typ</name><value><string>node</string></value>"
        "</member></struct></value></param></params>")
    self.assert_eq(dumps((as_internal_node('/'),)), expected)
    return
def start(self):
    """Idempotent start: bind to the periodic log of the same name under
    /services/logger and remember its URL."""
    if self.__running:
        return
    super(PeriodicLogTrendAdapter, self).start()
    logger = as_internal_node('/services/logger')
    self.periodic_log = logger.get_child(self.name)
    self.log_url = self.periodic_log.as_node_url()
    self.__running = True
    return
def __init__(self):
    """Polling thread bridging /interfaces/com1 and /interfaces/com2.

    Opens both com ports and caches each port node, its file object and
    its file descriptor for use by the poll loop.  (The previous
    docstring mentioned an 'ir_nodes' parameter that does not exist.)
    """
    Thread.__init__(self)
    self._poll_obj = None
    self.go = 1
    self._com1 = as_internal_node('/interfaces/com1')
    self._com1.open()
    self._com1_file = self._com1.file
    self._com1_fd = self._com1_file.fileno()
    self._com2 = as_internal_node('/interfaces/com2')
    self._com2.open()
    self._com2_file = self._com2.file
    self._com2_fd = self._com2_file.fileno()
def configure(self, config):
    """Configure the trend-manager HTTP handler: URL path, display name,
    the (deferred) Trend Manager node, and the security flag."""
    self.setattr('path', config.get('path', '/trendmanager'))
    self.setattr('name', config.get('name', 'Trend Manager Handler'))
    manager_url = config.get('manager', '/services/Trend Manager')
    self.setattr('manager', as_deferred_node(manager_url))
    self.secured = as_internal_node("/services").secured
    super(TrendManagerHandler, self).configure(config)
    return
def handle_logout(self, request):
    """Destroy the request's session (if any), expire the auth cookies,
    and show a logged-out confirmation page."""
    session_manager = as_internal_node("/services/session_manager")
    cookies = [Cookie("CUSER", "EXPIRED")]
    session_id = request.get_cookie("SID")
    if session_id:
        session_manager.destroy(session_id)
        # Expire the session cookies along with the destroyed session.
        cookies.append(Cookie("CRYPT", "EXPIRED"))
        cookies.append(Cookie("SID", "EXPIRED"))
    self.assign_cookies(cookies, request)
    self.handle_form(request, "information", "You have logged out.")
def test_monitor_from_methods_and_start(self):
    """Monitors are unique per UDI: identical kw args yield the same
    monitor, differing ones yield distinct monitors, and the dynamic
    container holds exactly the created set.

    Local names are deliberately kept because assert_comparison
    evaluates its arguments as expression strings.
    """
    self.add_dm()
    monitor1 = self.dm.monitor_from_kw(UniqueDeviceIdentifier,
                                       device_class='udi')
    monitor2 = self.dm.monitor_from_kw(UniqueDeviceIdentifier,
                                       device_class='udi')
    self.assert_comparison('monitor2', 'is', 'monitor1')
    monitor_1_1 = self.dm.monitor_from_kw(UniqueTestIdentifier,
                                          device_class='test',
                                          port='/interfaces/com1',
                                          address=1)
    monitor_1_2 = self.dm.monitor_from_kw(UniqueTestIdentifier,
                                          device_class='test',
                                          port='/interfaces/com1',
                                          address=2)
    self.assert_comparison('monitor_1_1', 'is not', 'monitor_1_2')
    monitor_2_1 = self.dm.monitor_from_udi(
        UniqueTestIdentifier(device_class='test',
                             port='/interfaces/com2',
                             address=1))
    monitor_2_2 = self.dm.monitor_from_udi(
        UniqueTestIdentifier(device_class='test',
                             port='/interfaces/com2',
                             address=2))
    self.assert_comparison('monitor_2_1', 'is not', 'monitor_2_2')
    instance_list = [monitor1, monitor_1_1, monitor_1_2,
                     monitor_2_1, monitor_2_2]
    node_list = self.dm.dynamic_container().children_nodes()
    while node_list:
        node = node_list.pop()
        self.assert_comparison('node', 'in', 'instance_list')
        instance_list.remove(node)
    self.assert_comparison('instance_list', '==', '[]')
    # Start will cause the monitors to "register" with the Device Manager.
    as_internal_node('/').start()
    return
def start(self):
    """Start the Trend Manager.

    Under the pdo lock: loads the persistent trend configuration,
    migrating any legacy PersistentDataObject data on first run.  Then
    ensures a 'trends' child container exists and is started, and
    unmarshals each stored trend, deleting any entry whose stored dump
    can no longer be loaded.  On any failure the running flag is reset
    so start() can be retried.
    """
    try:
        self._pdo_lock.acquire()
        try:
            if self.__running:
                return
            self.__running = True
            self._trendconfig = PersistentDictionary(filename(self),
                                                     encode=None,
                                                     decode=None)
            if not self._trendconfig:
                # First run with the dictionary store: migrate any data
                # saved by the legacy PersistentDataObject mechanism.
                pdodata = PersistentDataObject(self, dmtype=GC_NEVER)
                if os.path.exists(pdodata.filename()):
                    msglog.log('broadway', msglog.types.INFO,
                               "Migrating previous trend data")
                    pdodata.trends = {}
                    pdodata.load()
                    self._trendconfig.update(pdodata.trends)
                del (pdodata)
        finally:
            self._pdo_lock.release()
        super(TrendManager, self).start()
        self.logger = node.as_internal_node(self.logger_url)
        if self.has_child('trends'):
            self.trends = self.get_child('trends')
        else:
            self.trends = CompositeNode()
            self.trends.configure({'parent': self, 'name': 'trends'})
        self.trends.start()
        corrupt_trends = []
        for trendname, trenddump in self._trendconfig.items():
            msg = "Loading trend: %s" % trendname
            msglog.log('trendmanager', msglog.types.INFO, msg)
            try:
                trend = unmarshal(trenddump)
            except:
                # Remember the name so the bad entry is removed below.
                corrupt_trends.append(trendname)
                msg = "Failed to load trend: %s" % trendname
                msglog.log('trendmanager', msglog.types.ERR, msg)
                msglog.exception(prefix='Handled')
        for trendname in corrupt_trends:
            try:
                msg = "Deleting trend information: %s" % trendname
                msglog.log('trendmanager', msglog.types.INFO, msg)
                self._delete_trend_configuration(trendname)
                if self.trends.has_child(trendname):
                    trend = self.trends.get_child(trendname)
                    trend.prune(force=True)
            except:
                msglog.exception(prefix='Handled')
    except:
        # Leave the manager restartable if startup failed part-way.
        self.__running = False
        raise
    return
def test_timestamp_exception(self):
    """Formatting rows that lack a timestamp column must raise
    EIncompatiableFormat."""
    self.new_column_a_log()
    as_internal_node('/').start()
    formatter = as_internal_node(
        '/services/logger/bogus_log/exporters/periodic_exporter/'
        'metermail_formatter')
    data = [{'column-A': x} for x in range(1, 10)]
    try:
        xml = formatter.format(data)
        # Drain the stream: the error may surface lazily during reads.
        while xml.read(100):
            pass
        self.fail(
            'If no timestamp field it should throw an EIncompatiableFormat'
        )
    except EIncompatiableFormat:
        pass
def merge_preferences(self, preferences):
    """Overlay 'preferences' onto a deep copy of the default preferences,
    defaulting the title to the log node's name when a log is bound.
    Failures are logged and re-raised."""
    try:
        defaults = copy.deepcopy(TrendBase.DEFAULT_PREFERENCES)
        if self.log_url:
            defaults["title"] = as_internal_node(self.log_url).name
        self.merge_container(preferences, defaults)
    except:
        msglog.exception()
        raise
    return preferences
def start(self):
    """Start the RZNet service node.

    Resolves the pass-through port node (unless QA config disables it),
    creates the RznetThread line handler, discovers legacy child nodes
    when not rooted under /services/control, starts the thread, waits
    for its init lock, then starts children and broadcasts an update
    request.
    """
    self.debug_print(1, 'start()')
    if not self.running:
        self.running = 1
        self.__status = -1
        # If Config has disabled Internal Modem Server, try to get a ref
        # to the ComIfRouteNode at the given rzhost_master_path (in
        # nodetree):
        port_node = None
        self.debug_print(1, 'rzhost_master_path = %s',
                         self.rzhost_master_path)
        if self.QA != 0:
            try:
                com_if_node = as_internal_node(self.rzhost_master_path)
            except ENoSuchName, segment:
                msglog.log('RznetNode', msglog.types.ERR,
                           ('Failed to find'
                            ' InterfaceRouterNode object'
                            ' at %s, at segment %s!'
                            ' Pass-through will not run.')
                           % (self.rzhost_master_path, segment))
            else:
                port_node = com_if_node.parent
        self.debug_print(1, 'port_node = %s', str(port_node))
        # Create/init thread that runs the whole shootin' match. Pass in
        # desired rznet_addr:
        self.line_handler = RznetThread(self.parent,
                                        self.rzhost_slave_port,
                                        port_node, self.QA,
                                        self.rznet_addr, self)
        # applies to ALL devices
        self.line_handler.def_max_dev_subscrs = self.def_max_dev_subscrs
        # Discover kids in start() rather than in configure, because
        # creating nodes in configure could conceivably confuse the
        # framework (ie should it attempt to config the new nodes?).
        # discover_children() explicitly configs each child node.
        if self.parent.as_node_url() != '/services/control':
            # under com: need to kick start old style nodes
            self.discover_children()
        # next, start the line handler thread: opens RS485 file object,
        # and slave and cmd sockets
        self.line_handler.start();
        # @fixme HACK to wait for the line_handler thread to init.
        self.line_handler._internal_lock.acquire()
        self.line_handler._internal_lock.release()
        # @fixme END HACK to wait for the line_handler thread to init.
        # get actual addr from ldisc:
        self.rznet_addr = self.line_handler.get_addrs()[0]
        self.debug_print(1, 'ldisc has addr %d.', self.rznet_addr)
        CompositeNode.start(self)
        # send this out AFTER all bound_proxies are started
        self.line_handler.broadcast_update_request()
        self.__status = 1
def test_timestamp_exception(self):
    """Formatting rows that lack a timestamp column must raise
    EIncompatiableFormat."""
    self.new_column_a_log()
    as_internal_node('/').start()
    formatter = as_internal_node(
        '/services/logger/bogus_log/exporters/periodic_exporter/'
        'metermail_formatter')
    data = [{'column-A': x} for x in range(1, 10)]
    try:
        xml = formatter.format(data)
        # Drain the stream: the error may surface lazily during reads.
        while xml.read(100):
            pass
        self.fail(
            'If no timestamp field it should throw an EIncompatiableFormat'
        )
    except EIncompatiableFormat:
        pass
def merge_preferences(self, preferences):
    """Overlay 'preferences' onto a deep copy of the default preferences,
    defaulting the title to the log node's name when a log is bound.
    Failures are logged and re-raised."""
    try:
        defaults = copy.deepcopy(TrendBase.DEFAULT_PREFERENCES)
        if self.log_url:
            defaults["title"] = as_internal_node(self.log_url).name
        self.merge_container(preferences, defaults)
    except:
        msglog.exception()
        raise
    return preferences
def start(self):
    """Start the RZNet service node.

    Resolves the pass-through port node (unless QA config disables it),
    creates the RznetThread line handler, discovers legacy child nodes
    when not rooted under /services/control, starts the thread, waits
    for its init lock, then starts children and broadcasts an update
    request.
    """
    self.debug_print(1, 'start()')
    if not self.running:
        self.running = 1
        self.__status = -1
        # If Config has disabled Internal Modem Server, try to get a ref
        # to the ComIfRouteNode at the given rzhost_master_path (in
        # nodetree):
        port_node = None
        self.debug_print(1, 'rzhost_master_path = %s',
                         self.rzhost_master_path)
        if self.QA != 0:
            try:
                com_if_node = as_internal_node(self.rzhost_master_path)
            except ENoSuchName, segment:
                msglog.log('RznetNode', msglog.types.ERR,
                           ('Failed to find'
                            ' InterfaceRouterNode object'
                            ' at %s, at segment %s!'
                            ' Pass-through will not run.')
                           % (self.rzhost_master_path, segment))
            else:
                port_node = com_if_node.parent
        self.debug_print(1, 'port_node = %s', str(port_node))
        # Create/init thread that runs the whole shootin' match. Pass in
        # desired rznet_addr:
        self.line_handler = RznetThread(self.parent,
                                        self.rzhost_slave_port,
                                        port_node, self.QA,
                                        self.rznet_addr, self)
        # applies to ALL devices
        self.line_handler.def_max_dev_subscrs = self.def_max_dev_subscrs
        # Discover kids in start() rather than in configure, because
        # creating nodes in configure could conceivably confuse the
        # framework (ie should it attempt to config the new nodes?).
        # discover_children() explicitly configs each child node.
        if self.parent.as_node_url() != '/services/control':
            # under com: need to kick start old style nodes
            self.discover_children()
        # next, start the line handler thread: opens RS485 file object,
        # and slave and cmd sockets
        self.line_handler.start()
        # @fixme HACK to wait for the line_handler thread to init.
        self.line_handler._internal_lock.acquire()
        self.line_handler._internal_lock.release()
        # @fixme END HACK to wait for the line_handler thread to init.
        # get actual addr from ldisc:
        self.rznet_addr = self.line_handler.get_addrs()[0]
        self.debug_print(1, 'ldisc has addr %d.', self.rznet_addr)
        CompositeNode.start(self)
        # send this out AFTER all bound_proxies are started
        self.line_handler.broadcast_update_request()
        self.__status = 1
def create_logger_alias(self, parent, name):
    """Ensure /services/logger has an Alias child 'name' pointing at
    this trend's log node; raise ENameInUse if the name is taken by
    something that is not ours."""
    logger_service = as_internal_node('/services/logger')
    if logger_service.has_child(name):
        # get_child returns the actual child; as_node would follow an alias.
        trend_alias = logger_service.get_child(name)
        taken = not isinstance(trend_alias, Alias)
        if not taken:
            # An alias only counts as "ours" if it targets a node under self.
            taken |= as_internal_node(
                os.path.dirname(trend_alias.node_url)) is not self
        if taken:
            raise ENameInUse(trend_alias.as_node_url())
    else:
        trend_alias = Alias()
    parent_url = as_node(parent).as_node_url()
    quoted = urllib.quote(name, '')
    trend_alias.configure({
        'name': name,
        'parent': logger_service,
        'node_url': os.path.join(parent_url, quoted, quoted),
    })
    return
def start(self):
    """Start the Trend Manager.

    Under the pdo lock: loads the persistent trend configuration,
    migrating any legacy PersistentDataObject data on first run.  Then
    ensures a 'trends' child container exists and is started, and
    unmarshals each stored trend, deleting any entry whose stored dump
    can no longer be loaded.  On any failure the running flag is reset
    so start() can be retried.
    """
    try:
        self._pdo_lock.acquire()
        try:
            if self.__running:
                return
            self.__running = True
            self._trendconfig = PersistentDictionary(filename(self),
                                                     encode=None,
                                                     decode=None)
            if not self._trendconfig:
                # First run with the dictionary store: migrate any data
                # saved by the legacy PersistentDataObject mechanism.
                pdodata = PersistentDataObject(self, dmtype=GC_NEVER)
                if os.path.exists(pdodata.filename()):
                    msglog.log("broadway", msglog.types.INFO,
                               "Migrating previous trend data")
                    pdodata.trends = {}
                    pdodata.load()
                    self._trendconfig.update(pdodata.trends)
                del (pdodata)
        finally:
            self._pdo_lock.release()
        super(TrendManager, self).start()
        self.logger = node.as_internal_node(self.logger_url)
        if self.has_child("trends"):
            self.trends = self.get_child("trends")
        else:
            self.trends = CompositeNode()
            self.trends.configure({"parent": self, "name": "trends"})
        self.trends.start()
        corrupt_trends = []
        for trendname, trenddump in self._trendconfig.items():
            msg = "Loading trend: %s" % trendname
            msglog.log("trendmanager", msglog.types.INFO, msg)
            try:
                trend = unmarshal(trenddump)
            except:
                # Remember the name so the bad entry is removed below.
                corrupt_trends.append(trendname)
                msg = "Failed to load trend: %s" % trendname
                msglog.log("trendmanager", msglog.types.ERR, msg)
                msglog.exception(prefix="Handled")
        for trendname in corrupt_trends:
            try:
                msg = "Deleting trend information: %s" % trendname
                msglog.log("trendmanager", msglog.types.INFO, msg)
                self._delete_trend_configuration(trendname)
                if self.trends.has_child(trendname):
                    trend = self.trends.get_child(trendname)
                    trend.prune(force=True)
            except:
                msglog.exception(prefix="Handled")
    except:
        # Leave the manager restartable if startup failed part-way.
        self.__running = False
        raise
    return
def _megatron_hwid_proc():
    """Derive the Megatron hardware model ('NBM-2500'/'NBM-5000') from
    the AI1 coprocessor UDI, defaulting to 'NBM-5000' on any failure.

    NOTE(review): no return statement is visible in this chunk; a
    'return hwid' presumably follows -- confirm before relying on it.
    """
    # @FIXME: Need to check the license manager, and then return
    # "NBM-2500" or "NBM-5000".
    megatron_models = ['NBM-2500', 'NBM-5000']
    try:
        from mpx.lib.node import as_internal_node
        # udi[3:7] presumably holds the model digits -- TODO confirm.
        hwid = 'NBM-' + str(as_internal_node('/interfaces/AI1').coprocessor.udi[3:7])
        if hwid not in megatron_models:
            # return the default hwid, i.e., NBM5000
            hwid = 'NBM-5000'
    except Exception, error:
        # Do nothing, can't use msglog here ...
        # return the default hwid, i.e., NBM5000
        hwid = 'NBM-5000'
def test_monitor_from_methods_and_start(self):
    """Monitors are unique per UDI: identical kw args yield the same
    monitor, differing ones yield distinct monitors, and the dynamic
    container holds exactly the created set.

    Local names are deliberately kept because assert_comparison
    evaluates its arguments as expression strings.
    """
    self.add_dm()
    monitor1 = self.dm.monitor_from_kw(UniqueDeviceIdentifier,
                                       device_class='udi')
    monitor2 = self.dm.monitor_from_kw(UniqueDeviceIdentifier,
                                       device_class='udi')
    self.assert_comparison('monitor2', 'is', 'monitor1')
    monitor_1_1 = self.dm.monitor_from_kw(UniqueTestIdentifier,
                                          device_class='test',
                                          port='/interfaces/com1',
                                          address=1)
    monitor_1_2 = self.dm.monitor_from_kw(UniqueTestIdentifier,
                                          device_class='test',
                                          port='/interfaces/com1',
                                          address=2)
    self.assert_comparison('monitor_1_1', 'is not', 'monitor_1_2')
    monitor_2_1 = self.dm.monitor_from_udi(
        UniqueTestIdentifier(device_class='test',
                             port='/interfaces/com2',
                             address=1))
    monitor_2_2 = self.dm.monitor_from_udi(
        UniqueTestIdentifier(device_class='test',
                             port='/interfaces/com2',
                             address=2))
    self.assert_comparison('monitor_2_1', 'is not', 'monitor_2_2')
    instance_list = [monitor1, monitor_1_1, monitor_1_2,
                     monitor_2_1, monitor_2_2]
    node_list = self.dm.dynamic_container().children_nodes()
    while node_list:
        node = node_list.pop()
        self.assert_comparison('node', 'in', 'instance_list')
        instance_list.remove(node)
    self.assert_comparison('instance_list', '==', '[]')
    # Start will cause the monitors to "register" with the Device Manager.
    as_internal_node('/').start()
    return
def _megatron_hwid_proc():
    """Derive the Megatron hardware model ('NBM-2500'/'NBM-5000') from
    the AI1 coprocessor UDI, defaulting to 'NBM-5000' on any failure.

    NOTE(review): no return statement is visible in this chunk; a
    'return hwid' presumably follows -- confirm before relying on it.
    """
    # @FIXME: Need to check the license manager, and then return
    # "NBM-2500" or "NBM-5000".
    megatron_models = ['NBM-2500', 'NBM-5000']
    try:
        from mpx.lib.node import as_internal_node
        # udi[3:7] presumably holds the model digits -- TODO confirm.
        hwid = 'NBM-' + str(
            as_internal_node('/interfaces/AI1').coprocessor.udi[3:7])
        if hwid not in megatron_models:
            # return the default hwid, i.e., NBM5000
            hwid = 'NBM-5000'
    except Exception, error:
        # Do nothing, can't use msglog here ...
        # return the default hwid, i.e., NBM5000
        hwid = 'NBM-5000'