def run_mergeCoaddDetections():
    """Spawn one mergeCoaddDetections substream per generated patch script.

    Reads WORK_DIR / nscripts / FILTERS out of the setup process variables,
    then creates `nscripts` substreams, each pointing CUR_SCRIPT at the
    corresponding patches_all.txt_NNNN.sh script.
    """
    process = pipeline.getProcessInstance("setup_mergeCoaddDetections")
    vars = HashMap(process.getVariables())
    workdir = vars.remove("WORK_DIR")
    nscript = vars.remove('nscripts')
    # FILTERS is not needed per-script; the remove() keeps it out of the
    # variables inherited by the substreams.  (The original also bound the
    # split result to an unused local, dropped here.)
    vars.remove("FILTERS")
    for num in range(int(nscript)):
        script = workdir + "/08-mergeCoaddDetections/scripts/patches_all.txt_%04d.sh" % num
        vars.put("CUR_SCRIPT", script)
        pipeline.createSubstream("mergeCoaddDetectionsFilter", num, vars)
def run_jointcalCoadd():
    """Spawn one jointcalCoadd substream per patch script, over all filters."""
    process = pipeline.getProcessInstance("setup_jointcalCoadd")
    vars = HashMap(process.getVariables())
    workdir = vars.remove("WORK_DIR")
    filters = vars.remove("FILTERS").split(',')
    stream_id = 0
    for band in filters:
        # Per-filter script count, removed so substreams don't inherit it.
        count = int(vars.remove('n' + band + 'scripts'))
        for script_no in range(1, count + 1):
            vars.put("CUR_SCRIPT",
                     workdir + "/05-jointcalCoadd/scripts/%s/patches_%03d.sh" % (band, script_no))
            pipeline.createSubstream("jointcalCoaddFilter", stream_id, vars)
            stream_id += 1
def run_singleFrameDriver():
    """Spawn one singleFrameDriver substream per visit script, over all filters."""
    process = pipeline.getProcessInstance("setup_singleFrameDriver")
    vars = HashMap(process.getVariables())
    workdir = vars.remove("WORK_DIR")
    filters = vars.remove("FILTERS").split(',')
    stream_id = 0
    for band in filters:
        # Per-filter script count, removed so substreams don't inherit it.
        count = int(vars.remove('n' + band + 'scripts'))
        for script_no in range(1, count + 1):
            vars.put("CUR_SCRIPT",
                     workdir + "/02-singleFrameDriver/scripts/%s/visit_%03d_script.sh" % (band, script_no))
            pipeline.createSubstream("singleFrameDriverFilter", stream_id, vars)
            stream_id += 1
def run_makeFpSummary():
    """Spawn one makeFpSummary substream per visit script, over all filters."""
    process = pipeline.getProcessInstance("setup_makeFpSummary")
    vars = HashMap(process.getVariables())
    workdir = vars.remove("WORK_DIR")
    filters = vars.remove("FILTERS").split(',')
    stream_id = 0
    for band in filters:
        # Per-filter script count, removed so substreams don't inherit it.
        count = int(vars.remove('n' + band + 'scripts'))
        for script_no in range(1, count + 1):
            # NOTE: this step uses MAKEFP_SCRIPT, not CUR_SCRIPT like the others.
            vars.put("MAKEFP_SCRIPT",
                     workdir + "/02p5-makeFpSummary/scripts/%s/visit_makeFpSummary_%03d_script.sh" % (band, script_no))
            pipeline.createSubstream("makeFpSummaryFilter", stream_id, vars)
            stream_id += 1
def run_measureCoaddSources():
    """Spawn one measureCoaddSources substream per patch script, over all filters."""
    process = pipeline.getProcessInstance("setup_measureCoaddSources")
    vars = HashMap(process.getVariables())
    workdir = vars.remove("WORK_DIR")
    filters = vars.remove("FILTERS").split(',')
    stream_id = 0
    for band in filters:
        # Per-filter script count, removed so substreams don't inherit it.
        count = int(vars.remove('n' + band + 'scripts'))
        # Scripts for this step are numbered from 0, unlike the visit-based steps.
        for script_no in range(count):
            vars.put("CUR_SCRIPT",
                     workdir + "/09-measureCoaddSources/scripts/%s/patches_%s.txt_%05d.sh" % (band, band, script_no))
            pipeline.createSubstream("measureCoaddSourcesFilter", stream_id, vars)
            stream_id += 1
def run_mergeCoaddMeasurements():
    """Spawn one mergeCoaddMeasurements substream per generated patch script."""
    process = pipeline.getProcessInstance("setup_mergeCoaddMeasurements")
    vars = HashMap(process.getVariables())
    workdir = vars.remove("WORK_DIR")
    count = int(vars.remove('nscripts'))
    for stream_id in range(count):
        path = workdir + "/10-mergeCoaddMeasurements/scripts/patches_all.txt_%05d.sh" % stream_id
        vars.put("CUR_SCRIPT", path)
        pipeline.createSubstream("mergeCoaddMeasurementsFilter", stream_id, vars)
def updateAttributes(self, context, configurationAttributes): print "Idp extension. Method: updateAttributes" attributeContext = context.getAttributeContext() customAttributes = HashMap() customAttributes.putAll(attributeContext.getIdPAttributes()) # Remove givenName attribute customAttributes.remove("givenName") # Update surname attribute if customAttributes.containsKey("sn"): customAttributes.get("sn").setValues( ArrayList(Arrays.asList(StringAttributeValue("Dummy")))) # Set updated attributes attributeContext.setIdPAttributes(customAttributes.values()) return True
def startScan(self, e):
    """Configure plot bounds, build the scanner parameter map, and launch an async scan.

    e: UI event object (unused beyond being the callback argument).
    Side effects: resets plot data, starts a Scanner thread, stores it on self.scanner.
    """
    print "Starting scan"
    self.maxValue = 0.0
    # Pad the plot axes ~10% beyond the data range (plus a fixed 2-unit margin).
    self.intensityData.setMinMaxX(1.1 * min(self.xvals) - 2,
                                  1.1 * max(self.xvals) + 2)
    self.intensityData.setMinMaxY(1.1 * min(self.yvals) - 2,
                                  1.1 * max(self.yvals) + 2)
    self.intensityData.setZero()
    self.intensityPlot.refreshGraphJPanel()
    # Java HashMap; Jython allows dict-style [] assignment on it.
    paramMap = HashMap()
    # propDevice rows look like (label, key, text-field): map key -> field text.
    # NOTE(review): assumed from the indexing below — confirm against the UI setup.
    for i in self.propDevice:
        paramMap[i[1]] = i[2].text
    paramMap['SlitInitial'] = self.scan.xOne1
    #the harp initial position has max x' and first swipe will go backwards
    paramMap['HarpInitial'] = str(
        float(self.scan.xTwo1) + float(self.scan.dXTwo))
    paramMap['SlitStepSize'] = self.scan.xOneStep
    paramMap['HarpStepSlitSize'] = self.scan.sps
    paramMap['HarpSpan'] = self.scan.dXTwo
    paramMap['HarpStepSize'] = self.scan.xTwoStep
    paramMap['RepRate'] = str(self.waveForm.repRate)
    paramMap['SlitSteps'] = self.scan.nOneStep
    paramMap['Speed'] = '2.5'          # fixed motor speed
    paramMap['StopTimeout'] = '120000' # ms
    paramMap['SystemType'] = self.propMisc[1][1]
    paramMap['DataPath'] = self.propMisc[0][1]
    # Expand the comma-separated 'Signal' entry into Signal1, Signal2, ...
    signal = paramMap.remove('Signal')
    i = 1
    for s in signal.split(','):
        paramMap['Signal' + str(i)] = s.strip()
        i = i + 1
    # Launch the scan asynchronously and keep a handle for later control.
    scanner = Scanner(self.updateScanStatus, self.dataListener)
    scanner.runAsync(paramMap)
    scanner.setPaused(False)
    self.scanner = scanner
    print str(paramMap)
class PlayerPresenceManager(Subject):
    """Tracks GGP player endpoints (host:port), polls their status, and
    persists the list to a JSON file.

    NOTE(review): auto-generated (java2python) code — several methods
    reference locals that were declared in the original Java but were lost
    in translation (see the per-method notes).  Kept byte-identical here.
    """
    monitoredPlayers = Map()

    class PlayerPresenceChanged(Event):
        """ generated source for class PlayerPresenceChanged """

    class PlayerPresenceAdded(Event):
        """ generated source for class PlayerPresenceAdded """

    class PlayerPresenceRemoved(Event):
        """ generated source for class PlayerPresenceRemoved """

    @classmethod
    def isDifferent(cls, a, b):
        """ generated source for method isDifferent """
        # NOTE(review): this returns the tuple (not Objects == a, b) — the
        # original Java was presumably !Objects.equals(a, b); broken as-is.
        return not Objects == a, b

    # Poll period for the presence monitor thread.
    INFO_PING_PERIOD_IN_SECONDS = 1

    class PresenceMonitor(Thread):
        """Background thread that periodically refreshes each player's status
        and notifies observers when it changes."""

        def run(self):
            """ generated source for method run """
            while True:
                try:
                    # NOTE(review): Java Thread.sleep takes milliseconds; a
                    # 1-second period would need * 1000 here — confirm intent.
                    Thread.sleep(self.INFO_PING_PERIOD_IN_SECONDS)
                except InterruptedException as e:
                    e.printStackTrace()
                # NOTE(review): keys / presence / old_status / new_status /
                # old_name / new_name are undefined — lost local declarations
                # from the generated translation.
                for key in keys:
                    if presence == None:
                        continue
                    if presence.getStatusAge() > self.INFO_PING_PERIOD_IN_SECONDS * 1000:
                        presence.updateInfo()
                        if self.isDifferent(old_status, new_status):
                            notifyObservers(self.PlayerPresenceChanged())
                        elif self.isDifferent(old_name, new_name):
                            notifyObservers(self.PlayerPresenceChanged())

    def __init__(self):
        """Load the persisted player list; seed it with defaults when empty."""
        super(PlayerPresenceManager, self).__init__()
        self.monitoredPlayers = HashMap()
        loadPlayersJSON()
        if len(self.monitoredPlayers) == 0:
            try:
                # When starting from a blank slate, add some initial players to the
                # monitoring list just so that it's clear how it works.
                addPlayer("127.0.0.1:9147")
                addPlayer("127.0.0.1:9148")
            except InvalidHostportException as e:
                # NOTE(review): monitor start probably belongs OUTSIDE this
                # except (it should run unconditionally); the collapsed
                # original is ambiguous — confirm against upstream Java.
                self.PresenceMonitor().start()

    @SuppressWarnings("serial")
    class InvalidHostportException(Exception):
        """Raised when a host:port string cannot be parsed."""

    def addPlayerSilently(self, hostport):
        """Add a player without notifying observers or saving; returns its presence."""
        try:
            if not self.monitoredPlayers.containsKey(hostport):
                # NOTE(review): `presence` is undefined here — the generated
                # translation lost its construction (likely new PlayerPresence(hostport)).
                self.monitoredPlayers.put(hostport, presence)
                return presence
            else:
                return self.monitoredPlayers.get(hostport)
        except ArrayIndexOutOfBoundsException as e:
            raise self.InvalidHostportException()
        except NumberFormatException as e:
            raise self.InvalidHostportException()

    def addPlayer(self, hostport):
        """Add a player, notify observers, and persist the list."""
        presence = self.addPlayerSilently(hostport)
        notifyObservers(self.PlayerPresenceAdded())
        savePlayersJSON()
        return presence

    def removePlayer(self, hostport):
        """Remove a player, notify observers, and persist the list."""
        self.monitoredPlayers.remove(hostport)
        notifyObservers(self.PlayerPresenceRemoved())
        savePlayersJSON()

    def getPresence(self, hostport):
        """Return the presence object for hostport (or None/null)."""
        return self.monitoredPlayers.get(hostport)

    def getSortedPlayerNames(self):
        """Return the monitored host:port keys, sorted (Java TreeSet)."""
        return TreeSet(self.monitoredPlayers.keySet())

    # Observer registry for the Subject pattern.
    observers = HashSet()

    def addObserver(self, observer):
        """ generated source for method addObserver """
        self.observers.add(observer)

    def notifyObservers(self, event):
        """Dispatch event to every registered observer."""
        # NOTE(review): bare `observers` — works only if resolved globally;
        # likely should be self.observers.
        for observer in observers:
            observer.observe(event)

    # Persistence target for the player list.
    playerListFilename = ".ggpserver-playerlist.json"

    def savePlayersJSON(self):
        """Write the monitored host:port set to the JSON player-list file."""
        try:
            # NOTE(review): playerListJSON / file_ / bw are undefined — the
            # generated translation lost their construction (JSONObject,
            # File, BufferedWriter).
            playerListJSON.put("hostports", self.monitoredPlayers.keySet())
            if not file_.exists():
                file_.createNewFile()
            bw.write(playerListJSON.__str__())
            bw.close()
        except IOException as ie:
            ie.printStackTrace()
        except JSONException as e:
            e.printStackTrace()

    def loadPlayersJSON(self):
        """Read the JSON player-list file and silently add each entry."""
        try:
            # NOTE(review): file_ / br / pdata / playerListJSON / theHostports /
            # i are undefined — lost local declarations.
            if not file_.exists():
                return
            try:
                # NOTE(review): invalid Python — assignment is not an
                # expression here; the Java read-loop did not translate.
                while (line = br.readLine()) != None:
                    pdata.append(line)
            finally:
                br.close()
            if playerListJSON.has("hostports"):
                while i < len(theHostports):
                    try:
                        self.addPlayerSilently(theHostports.get(i).__str__())
                    except InvalidHostportException as e:
                        e.printStackTrace()
                    i += 1
        except IOException as ie:
            ie.printStackTrace()
        except JSONException as e:
            e.printStackTrace()
class TtlCache(Map, K, V):
    """Map wrapper whose entries expire after `ttl` calls to prune();
    get() refreshes an entry's remaining TTL.

    NOTE(review): auto-generated (java2python) code — the generic-type base
    classes (Map, K, V) and several method bodies did not translate cleanly.
    Kept byte-identical here.
    """

    class Entry(object):
        """Internal holder pairing a cached value with its remaining TTL."""
        ttl = int()
        value = V()

        def __init__(self, value, ttl):
            """ generated source for method __init__ """
            self.value = value
            self.ttl = ttl

        @SuppressWarnings("unchecked")
        def equals(self, o):
            """Entries compare equal when their payloads compare equal."""
            if isinstance(o, (self.Entry, )):
                return (o).value == self.value
            return False

    contents = Map()
    ttl = int()

    def __init__(self, ttl):
        """ttl: number of prune() cycles an untouched entry survives."""
        super(TtlCache, self).__init__()
        self.contents = HashMap()
        self.ttl = ttl

    @synchronized
    def containsKey(self, key):
        """ generated source for method containsKey """
        return self.contents.containsKey(key)

    @synchronized
    def get(self, key):
        """Return the cached value (or None) and refresh its TTL."""
        entry = self.contents.get(key)
        if entry == None:
            return None
        # Reset the TTL when a value is accessed directly.
        entry.ttl = self.ttl
        return entry.value

    @synchronized
    def prune(self):
        """Age every entry by one cycle and evict those whose TTL hit zero."""
        toPrune = ArrayList()
        # NOTE(review): `contents` lacks self. and `entry` is never fetched
        # (should be the value for `key`) — lost in the generated translation.
        for key in contents.keySet():
            if entry.ttl == 0:
                toPrune.add(key)
            entry.ttl -= 1
        for key in toPrune:
            self.contents.remove(key)

    @synchronized
    def put(self, key, value):
        """Insert value with a fresh TTL; returns the displaced value, if any."""
        x = self.contents.put(key, self.Entry(value, self.ttl))
        if x == None:
            return None
        return x.value

    @synchronized
    def size(self):
        """ generated source for method size """
        return len(self.contents)

    @synchronized
    def clear(self):
        """ generated source for method clear """
        self.contents.clear()

    @synchronized
    def containsValue(self, value):
        """ generated source for method containsValue """
        return self.contents.containsValue(value)

    @synchronized
    def isEmpty(self):
        """ generated source for method isEmpty """
        return self.contents.isEmpty()

    @synchronized
    def keySet(self):
        """ generated source for method keySet """
        return self.contents.keySet()

    @synchronized
    def putAll(self, m):
        """Bulk insert via put() so every entry gets a fresh TTL."""
        for anEntry in m.entrySet():
            self.put(anEntry.getKey(), anEntry.getValue())

    @synchronized
    def remove(self, key):
        """Remove key and return its payload.
        NOTE(review): raises if key is absent (remove() returns null)."""
        return self.contents.remove(key).value

    @synchronized
    def values(self):
        """Return the set of cached payloads (TTLs stripped)."""
        theValues = HashSet()
        # NOTE(review): bare `contents` — likely should be self.contents.
        for e in contents.values():
            theValues.add(e.value)
        return theValues

    class entrySetMapEntry(Map, Entry, K, V):
        """Map.Entry adapter exposing a key/payload pair for entrySet()."""
        key = K()
        value = V()

        def __init__(self, k, v):
            """ generated source for method __init__ """
            super(entrySetMapEntry, self).__init__()
            self.key = k
            self.value = v

        def getKey(self):
            """ generated source for method getKey """
            return self.key

        def getValue(self):
            """ generated source for method getValue """
            return self.value

        def setValue(self, value):
            """ generated source for method setValue """
            # NOTE(review): invalid Python — assignment is not an expression;
            # the Java `return (this.value = value)` did not translate.
            return (self.value = value)

    @synchronized
    def entrySet(self):
        """Return Map.Entry-style views of the cache (payloads, not Entries)."""
        theEntries = HashSet()
        # NOTE(review): bare `contents` — likely should be self.contents.
        for e in contents.entrySet():
            theEntries.add(self.entrySetMapEntry(e.getKey(), e.getValue().value))
        return theEntries
class TimerTab(ITab, IHttpListener):
    """Burp extension tab that times GraphQL requests.

    Listens to HTTP traffic, pairs each GraphQL request with its response by
    URL, and logs the elapsed wall-clock time plus query metadata in a table.
    """

    def __init__(self, callbacks, helpers):
        # Burp extender callbacks / helpers handles.
        self._callbacks = callbacks
        self._helpers = helpers
        self.isRunning = True      # master on/off switch for logging
        self.toolFilter = 0        # 0 = accept traffic from every Burp tool
        # url -> request start time (ms); Java HashMap.
        self.reqResMap = HashMap()
        callbacks.registerHttpListener(self)
        # UI panel; the lambdas wire the panel buttons back to this object.
        self.panel = TimerPanel(
            logtable_factory=lambda model: LogTable(model, self._callbacks),
            external_clear_button_action_listener=lambda e: self.getReqResMap(
            ).clear(),
            external_start_button_action_listener=lambda e: self.setRunning(
                True),
            external_stop_button_action_listener=lambda e: self.setRunning(
                False),
            external_filter_action_listener=self.filter_action_listener,
            tools_keys=["All", "Proxy", "Intruder", "Scanner", "Repeater"])

    def getTabCaption(self):
        """
        Override ITab method
        :return: tab name
        """
        return "InQL Timer"

    def getUiComponent(self):
        """
        Override ITab method
        :return: Tab UI Component
        """
        self._callbacks.customizeUiComponent(self.panel.this)
        return self.panel.this

    def filter_action_listener(self, e):
        """Combo-box handler: translate the selected tool name into a Burp tool flag."""
        tool = e.getSource().getSelectedItem()
        if tool == "All":
            self.setToolFilter(0)
        elif tool == "Proxy":
            self.setToolFilter(IBurpExtenderCallbacks.TOOL_PROXY)
        elif tool == "Intruder":
            self.setToolFilter(IBurpExtenderCallbacks.TOOL_INTRUDER)
        elif tool == "Scanner":
            self.setToolFilter(IBurpExtenderCallbacks.TOOL_SCANNER)
        elif tool == "Repeater":
            self.setToolFilter(IBurpExtenderCallbacks.TOOL_REPEATER)
        else:
            raise RuntimeError("Unknown tool: %s" % tool)

    def setRunning(self, running):
        # Start/stop logging without unregistering the listener.
        self.isRunning = running

    def setToolFilter(self, toolFilter):
        # Burp tool flag to accept, or 0 for all tools.
        self.toolFilter = toolFilter

    def processHttpMessage(self, toolFlag, messageIsRequest, requestResponse):
        """IHttpListener hook: time GraphQL requests and enqueue log entries."""
        if self.isRunning:
            if self.toolFilter == 0 or self.toolFilter == toolFlag:
                messageInfo = self._helpers.analyzeRequest(requestResponse)
                url = messageInfo.getUrl()
                requestBody = requestResponse.getRequest(
                )[messageInfo.getBodyOffset():].tostring()
                if not is_query(requestBody):
                    return  # exit early: not a GraphQL request
                qobj = json.loads(requestBody)
                queryBody = ""
                operationName = ""
                if 'query' in qobj:
                    queryBody = qobj['query']
                if 'operationName' in qobj:
                    operationName = qobj['operationName']
                if messageIsRequest:
                    # Remember when this URL's request went out.
                    self.reqResMap.put(url, System.currentTimeMillis())
                elif self.reqResMap.containsKey(url):
                    # Response arrived: elapsed ms since the matching request.
                    time = System.currentTimeMillis() - self.reqResMap.get(url)
                    self.reqResMap.remove(url)
                    # create a new log entry with the message details
                    synchronize.apply_synchronized(
                        self.panel.getLogTableModel().getLogArray(),
                        self.syncProcessHttpMessage,
                        (toolFlag, requestResponse, time, queryBody,
                         operationName))

    def syncProcessHttpMessage(self, toolFlag, messageInfo, time, queryBody,
                               operationName):
        """Runs while holding the log-array lock: apply scope/query filters, then log."""
        row = self.panel.getLogTableModel().getLogArray().size()
        # Log all requests - the default
        if not self.panel.getQueryFilterText(
        ) and not self.panel.isScopeSelected():
            self.addLog(messageInfo, toolFlag, time, row, operationName)
        # Log filter URL requests
        elif not self.panel.isScopeSelected() and self.panel.getQueryFilterText() and \
                self.panel.getQueryFilterText() in queryBody:
            self.addLog(messageInfo, toolFlag, time, row, operationName)
        # Log in-scope requests
        elif self.panel.isScopeSelected() and not self.panel.getQueryFilterText() and \
                self._callbacks.isInScope(self._helpers.analyzeRequest(messageInfo).getUrl()):
            self.addLog(messageInfo, toolFlag, time, row, operationName)
        # Log in-scope requests and filter
        elif self.panel.isScopeSelected() and self.panel.getQueryFilterText() and \
                self._callbacks.isInScope(self._helpers.analyzeRequest(messageInfo).getUrl()) and \
                self.panel.getQueryFilterText() in queryBody:
            self.addLog(messageInfo, toolFlag, time, row, operationName)

    def addLog(self, messageInfo, toolFlag, time, row, operationName):
        """Append one entry to the log table and notify the table model."""
        self.panel.getLogTableModel().getLogArray().add(
            Log(
                LocalDateTime.now(),
                self._callbacks.getToolName(toolFlag),
                self._callbacks.saveBuffersToTempFiles(messageInfo),
                self._helpers.analyzeRequest(messageInfo).getUrl(),
                self._helpers.analyzeResponse(
                    messageInfo.getResponse()).getStatusCode(), operationName,
                time))
        self.panel.getLogTableModel().fireTableRowsInserted(row, row)

    def getReqResMap(self):
        # Exposed so the panel's clear button can empty the pending-request map.
        return self.reqResMap
class PagingWindow(UserList):
    """A bounded, always-sorted window of keys with optional attached values.

    Keys live in self.data (kept sorted); optional per-key values live in a
    Java HashMap (self.hash).  Once the window holds `limitSize` keys, new
    keys may evict the current smallest (limitMin) and/or largest (limitMax)
    element, so the window "pages" toward the preserved end.

    Options (keyword arguments):
        limitSize        -- int, max number of keys held (0 = unbounded)
        limitMin         -- bool, allow evicting the smallest element
        limitMax         -- bool, allow evicting the largest element
        preserveSmallest -- bool, shorthand for limitMin=False, limitMax=True
        preserveLargest  -- bool, shorthand for limitMin=True, limitMax=False
    """

    def __init__(self, **kwargs):
        # The list is contained w/in self.data
        UserList.__init__(self)
        self.limitSize = 0      # Unbounded
        self.limitMin = False   # unbounded
        self.limitMax = False   # unbounded
        self.minElmIndx = -1
        self.minElmVal = None
        self.maxElmIndx = -1
        self.maxElmVal = None
        # Hash map for storing an optional object per key.
        self.hash = HashMap()
        opts = {
            'limitSize': self.optLimitSize,
            'limitMin': self.optLimitMin,
            'limitMax': self.optLimitMax,
            'preserveSmallest': self.optPreserveSmallest,
            'preserveLargest': self.optPreserveLargest,
        }
        # Process each optional argument.
        for k in kwargs.keys():
            # BUG FIX: opts[k] raised KeyError for unknown options, so the
            # intended "is None" check below could never fire; use .get().
            optFunc = opts.get(k)
            if optFunc is None:
                # BUG FIX: the option name was never interpolated ("[k]").
                raise LookupError(
                    "Option [{}] is not supported by the PagingWindow class.".format(k))
            else:
                optFunc(kwargs[k])
        random.seed(time.time())

    def optLimitSize(self, _size):
        """Set the maximum window size (must be an int)."""
        if type(_size).__name__ != "int":
            raise ValueError("limitSize parameter must be type int. Got type [{}].".format(type(_size).__name__))
        self.limitSize = _size

    def optLimitMin(self, _min):
        """Enable/disable eviction of the smallest element."""
        if type(_min).__name__ != "bool":
            raise ValueError("limitMin parameter must be type bool.")
        self.limitMin = _min

    def optLimitMax(self, _max):
        """Enable/disable eviction of the largest element."""
        if type(_max).__name__ != "bool":
            raise ValueError("limitMax parameter must be type bool.")
        self.limitMax = _max

    def optPreserveSmallest(self, _small):
        """Keep the smallest keys: only the largest element may be evicted."""
        if type(_small).__name__ != "bool":
            raise ValueError("preserveSmallest parameter must be type bool.")
        if _small:
            self.limitMin = False
            self.limitMax = True

    def optPreserveLargest(self, _large):
        """Keep the largest keys: only the smallest element may be evicted."""
        if type(_large).__name__ != "bool":
            raise ValueError("preserveLargest parameter must be type bool.")
        if _large:
            self.limitMin = True
            self.limitMax = False

    def _evict(self, victim):
        """Remove `victim` from the sorted list and its value (if any) from the hash."""
        if victim is not None:
            self.data.remove(victim)
            if self.hash.containsKey(victim):
                self.hash.remove(victim)

    def add(self, _key, _value=None):
        """Insert _key (and optional _value).  Returns True if inserted,
        False when the full window rejects the key."""
        dataLen = len(self.data)
        if dataLen < self.limitSize:
            # Window not yet full: just insert and keep the list sorted.
            self.data.append(_key)
            if _value is not None:
                self.hash.put(_key, _value)
            # NOTE: sorting on every insert is O(n log n); a sorted container
            # (e.g. bisect.insort) would be cheaper, behavior kept as-is.
            self.data.sort()
        else:
            insertMinOk = True
            insertMaxOk = True
            if self.limitMin:
                # A key smaller than the current minimum may not displace it.
                # BUG FIX: original tested `self.data.__len__ > 0` — the bound
                # method object, which is always truthy; call len() instead.
                if len(self.data) > 0:
                    # The minElmIndx is always 0 (list is sorted),
                    # unless the array has no data.
                    self.minElmIndx = 0
                else:
                    self.minElmIndx = -1
                if self.minElmIndx >= 0:
                    self.minElmVal = self.data[self.minElmIndx]
                    if _key < self.minElmVal:
                        insertMinOk = False
            if self.limitMax:
                # A key larger than the current maximum may not displace it.
                self.maxElmIndx = len(self.data) - 1
                # BUG FIX: was `> 0`, which skipped the max check for a
                # single-element window; index 0 is a valid maximum.
                if self.maxElmIndx >= 0:
                    self.maxElmVal = self.data[self.maxElmIndx]
                    if _key > self.maxElmVal:
                        insertMaxOk = False
            if self.limitMin and self.limitMax:
                # Either end may be evicted: pick a side at random.
                if insertMinOk and insertMaxOk:
                    # BUG FIX: call was `self.choseSide` (typo) while the
                    # method is `chooseSide` -> AttributeError at runtime.
                    side = self.chooseSide(_key)
                    if side == 0:
                        raise AssertionError("chooseSide() should not return 0 as a result")
                    if side < 0:
                        self._evict(self.minElmVal)
                    if side > 0:
                        self._evict(self.maxElmVal)
            else:
                if self.limitMin:
                    if insertMinOk:
                        # BUG FIX: the hash cleanup here used maxElmVal
                        # (copy/paste); the evicted element is minElmVal.
                        self._evict(self.minElmVal)
                    else:
                        if len(self.data) + 1 > self.limitSize:
                            return False
                if self.limitMax:
                    if insertMaxOk:
                        self._evict(self.maxElmVal)
                    else:
                        if len(self.data) + 1 > self.limitSize:
                            return False
            self.data.append(_key)
            if _value is not None:
                self.hash.put(_key, _value)
            self.data.sort()
        # Return True when a value is added
        return True

    def pop(self, indx):
        """Pop data[indx]; returns the key, or [key, value] when a value was stored."""
        # BUG FIX: `super(UserList, self).pop(indx)` searches the MRO *past*
        # UserList (object has no pop); pop from the underlying list instead.
        _key = self.data.pop(indx)
        # By default, return the key.
        retVal = _key
        # But, if the key has a corresponding value in the hash...
        if self.hash.containsKey(_key):
            # ...return both, and drop the value from the hash.
            retVal = [_key, self.hash.get(_key)]
            self.hash.remove(_key)
        return retVal

    def chooseSide(self, _key):
        """Randomly choose which end to evict: -1 (smallest) or 1 (largest).

        BUG FIX: original was missing `self` yet was called as a bound method.
        """
        if random.getrandbits(1) == 0:
            return -1
        return 1

    def size(self):
        """Number of keys currently held."""
        return len(self.data)
def getJdbcResources(self, env, jdbcOshMap, globalJdbcResources=None):
    """Collect JDBC datasources from a Tomcat context XML element and report them.

    env                 -- JDOM element holding Resource / ResourceParams /
                           ResourceLink children (None = nothing to do)
    jdbcOshMap          -- out-param map: datasource name -> OSH object
    globalJdbcResources -- optional map of server-wide datasources that
                           ResourceLink entries may reference
    Side effects: appends reported topology to self.OSHVResult.
    """
    if env is None:
        return
    jdbcResources = HashMap()
    # Pass 1: one JdbcResource per <Resource> element.
    resources = env.getChildren('Resource')
    for resource in resources:
        name = resource.getAttributeValue('name')
        dsType = resource.getAttributeValue('type')
        driverClassName = resource.getAttributeValue('driverClassName')
        url = resource.getAttributeValue('url')
        maxActive = resource.getAttributeValue('maxActive')
        logger.debug('Found jdbc datasource ', name, ' driver ',
                     str(driverClassName), ' url ', str(url))
        jdbcResources.put(name, JdbcResource(name, dsType, driverClassName,
                                             url, maxActive))
    # Pass 2: fill attributes missing on the Resource from <ResourceParams>.
    for resource in resources:
        name = resource.getAttributeValue('name')
        if name is None:
            continue
        # do not read additional parameters for non-existing resource
        jdbcResource = jdbcResources.get(name)
        if jdbcResource is None:
            continue
        # update existing JDBC resource with absent parameters data
        for resourceParamsEl in env.getChildren('ResourceParams'):
            if resourceParamsEl.getAttributeValue('name') == name:
                resourceParams = self.getResourceParamsValues(resourceParamsEl)
                dsType = resourceParams.get('type')
                if (dsType is not None) and (jdbcResource.type is None):
                    jdbcResource.type = dsType
                driverClassName = resourceParams.get('driverClassName')
                if (driverClassName is not None) and (jdbcResource.driverClass is None):
                    jdbcResource.driverClass = driverClassName
                url = resourceParams.get('url')
                if (url is not None) and (jdbcResource.url is None):
                    jdbcResource.url = url
                maxActive = resourceParams.get('maxActive')
                if (maxActive is not None) and (jdbcResource.maxActive is None):
                    jdbcResource.maxActive = maxActive
        # Keep only real DataSource resources.
        if jdbcResource.type != 'javax.sql.DataSource':
            jdbcResources.remove(name)
    # Pass 3: resolve <ResourceLink> references against the global resources.
    resources = env.getChildren('ResourceLink')
    for resource in resources:
        name = resource.getAttributeValue('name')
        globalName = resource.getAttributeValue('global')
        dsType = resource.getAttributeValue('type')
        logger.debug('Found resource link ', name, ' for global name ',
                     globalName, ' of type ', dsType)
        if dsType != 'javax.sql.DataSource':
            continue
        if globalJdbcResources is not None:
            jdbcResource = globalJdbcResources.get(globalName)
            if jdbcResource is None:
                continue
            logger.debug('Found jdbc datastore with global name ', globalName)
            jdbcOshMap.put(name, jdbcResource)
    # Report the discovered datasources, resolving hosts via DNS.
    dnsResolver = _DnsResolverDecorator(netutils.JavaDnsResolver(),
                                        self.destinationIp)
    reporter = jdbcModule.DnsEnabledJdbcTopologyReporter(
        jdbcModule.DataSourceBuilder(), dnsResolver)

    # Minimal adapter giving the reporter the container OSH it expects.
    class Container:
        def __init__(self, osh):
            self.osh = osh

        def getOsh(self):
            return self.osh

    container = Container(self.tomcatOsh)
    for jdbc in jdbcResources.values():
        datasource = jdbcModule.Datasource(jdbc.name, jdbc.url,
                                           driverClass=jdbc.driverClass)
        self.OSHVResult.addAll(reporter.reportDatasources(container, datasource))
        jdbcOshMap.put(jdbc.name, datasource.getOsh())
def getJdbcResources(self, env, jdbcOshMap, globalJdbcResources=None):
    """Collect JDBC datasources from a Tomcat context XML element and report them.

    NOTE(review): this is a near-verbatim duplicate of the getJdbcResources
    defined earlier in this file — consider consolidating.

    env                 -- JDOM element holding Resource / ResourceParams /
                           ResourceLink children (None = nothing to do)
    jdbcOshMap          -- out-param map: datasource name -> OSH object
    globalJdbcResources -- optional map of server-wide datasources that
                           ResourceLink entries may reference
    Side effects: appends reported topology to self.OSHVResult.
    """
    if env is None:
        return
    jdbcResources = HashMap()
    # Pass 1: one JdbcResource per <Resource> element.
    resources = env.getChildren('Resource')
    for resource in resources:
        name = resource.getAttributeValue('name')
        dsType = resource.getAttributeValue('type')
        driverClassName = resource.getAttributeValue('driverClassName')
        url = resource.getAttributeValue('url')
        maxActive = resource.getAttributeValue('maxActive')
        logger.debug('Found jdbc datasource ', name, ' driver ',
                     str(driverClassName), ' url ', str(url))
        jdbcResources.put(
            name, JdbcResource(name, dsType, driverClassName, url, maxActive))
    # Pass 2: fill attributes missing on the Resource from <ResourceParams>.
    for resource in resources:
        name = resource.getAttributeValue('name')
        if name is None:
            continue
        # do not read additional parameters for non-existing resource
        jdbcResource = jdbcResources.get(name)
        if jdbcResource is None:
            continue
        # update existing JDBC resource with absent parameters data
        for resourceParamsEl in env.getChildren('ResourceParams'):
            if resourceParamsEl.getAttributeValue('name') == name:
                resourceParams = self.getResourceParamsValues(
                    resourceParamsEl)
                dsType = resourceParams.get('type')
                if (dsType is not None) and (jdbcResource.type is None):
                    jdbcResource.type = dsType
                driverClassName = resourceParams.get('driverClassName')
                if (driverClassName is not None) and (
                        jdbcResource.driverClass is None):
                    jdbcResource.driverClass = driverClassName
                url = resourceParams.get('url')
                if (url is not None) and (jdbcResource.url is None):
                    jdbcResource.url = url
                maxActive = resourceParams.get('maxActive')
                if (maxActive is not None) and (jdbcResource.maxActive is None):
                    jdbcResource.maxActive = maxActive
        # Keep only real DataSource resources.
        if jdbcResource.type != 'javax.sql.DataSource':
            jdbcResources.remove(name)
    # Pass 3: resolve <ResourceLink> references against the global resources.
    resources = env.getChildren('ResourceLink')
    for resource in resources:
        name = resource.getAttributeValue('name')
        globalName = resource.getAttributeValue('global')
        dsType = resource.getAttributeValue('type')
        logger.debug('Found resource link ', name, ' for global name ',
                     globalName, ' of type ', dsType)
        if dsType != 'javax.sql.DataSource':
            continue
        if globalJdbcResources is not None:
            jdbcResource = globalJdbcResources.get(globalName)
            if jdbcResource is None:
                continue
            logger.debug('Found jdbc datastore with global name ', globalName)
            jdbcOshMap.put(name, jdbcResource)
    # Report the discovered datasources, resolving hosts via DNS.
    dnsResolver = _DnsResolverDecorator(netutils.JavaDnsResolver(),
                                        self.destinationIp)
    reporter = jdbcModule.DnsEnabledJdbcTopologyReporter(
        jdbcModule.DataSourceBuilder(), dnsResolver)

    # Minimal adapter giving the reporter the container OSH it expects.
    class Container:
        def __init__(self, osh):
            self.osh = osh

        def getOsh(self):
            return self.osh

    container = Container(self.tomcatOsh)
    for jdbc in jdbcResources.values():
        datasource = jdbcModule.Datasource(jdbc.name, jdbc.url,
                                           driverClass=jdbc.driverClass)
        self.OSHVResult.addAll(
            reporter.reportDatasources(container, datasource))
        jdbcOshMap.put(jdbc.name, datasource.getOsh())