def handleQuery(self, query, fieldName, formatStr):
    """Run a Solr query restricted to dataset objects, collect the distinct
    values of `fieldName` from up to 1000 hits (kept only when they contain
    self.term as a substring, if self.term is set), and write them to
    self.writer as a bracketed list rendered through formatStr (which must
    contain a %(hit)s placeholder). Writes ["" ]-style output when nothing
    is found. Always closes the writer.

    NOTE(review): near-duplicate of the other handleQuery in this file —
    consider consolidating."""
    out = ByteArrayOutputStream()
    req = SearchRequest(query)
    # Restrict to dataset objects only.
    req.setParam("fq", 'item_type:"object"')
    req.setParam("fq", 'workflow_id:"dataset"')
    req.setParam("rows", "1000")  # hard cap; hits beyond 1000 are ignored
    self.indexer.search(req, out)
    res = SolrResult(ByteArrayInputStream(out.toByteArray()))
    hits = HashSet()  # de-duplicates values across result documents
    if (res.getNumFound() > 0):
        results = res.getResults()
        for searchRes in results:
            searchResList = searchRes.getList(fieldName)
            if (searchResList.isEmpty() == False):
                for hit in searchResList:
                    if self.term is not None:
                        # substring match (str.find), not a prefix match
                        if hit.find(self.term) != -1:
                            hits.add(hit)
                    else:
                        hits.add(hit)
        self.writer.print("[")
        hitnum = 0
        for hit in hits:
            # comma-separate every entry after the first
            if (hitnum > 0):
                self.writer.print("," + formatStr % {"hit": hit})
            else:
                self.writer.print(formatStr % {"hit": hit})
            hitnum += 1
        self.writer.print("]")
    else:
        self.writer.println("[\"\"]")
    self.writer.close()
def __init__(self, client_addr, server_addr, operations_map, is_simulator=False):
    """Set up the CLI session state: copy the caller-supplied operations map
    (a Java Map — keySet() is called on it below), register the built-in
    auxiliary commands, and initialise a JLine ConsoleReader with
    tab-completion over every known command.

    client_addr / server_addr: endpoint addresses, stored as-is.
    is_simulator: flag stored for later use; not read here."""
    self.client_addr = client_addr
    self.server_addr = server_addr
    self.is_simulator = is_simulator
    # dict() copies the Java map so the built-ins below don't mutate it.
    self.operations = dict(operations_map)
    self.operations["help"] = "help <optional command>"
    self.operations["login"] = "******"
    self.operations["logout"] = "logout"
    self.operations["echo"] = "echo text"
    self.operations["exit"] = "exit"
    self.aux_commands = {"help", "login", "logout", "echo", "exit"}
    self.op_commands = set(operations_map.keySet())
    self.all_commands = set(self.op_commands)
    self.all_commands.update(self.aux_commands)
    # JLine's StringsCompleter expects a Java collection, so convert the
    # Python set element-by-element.
    all_commands_set = HashSet()
    for x in self.all_commands:
        all_commands_set.add(x)
    try:
        self.console = ConsoleReader()
        self.console.addCompleter(StringsCompleter(all_commands_set))
        self.console.setPrompt("prompt> ")
    except IOException as err:
        # Console setup failure is logged, not fatal; self.console stays unset.
        err.printStackTrace()
def createNG(): exist = doesNodeGroupExist(ngName) if (exist == "true"): print "Node group " + ngName + " already exists. Skipping its creation..." return nodes = convertToList(AdminConfig.list("Node")) nodesForNG = HashSet() for node in nodes: nodeName = AdminConfig.showAttribute(node, "name") nodeagentid = AdminConfig.getid("/Node:" + nodeName + "/Server:nodeagent/") if (nodeagentid != None and nodeagentid != ""): nodeservers = convertToList(AdminConfig.list("Server", node)) addNode = 1 for server in nodeservers: serverType = AdminConfig.showAttribute(server, "serverType") if (serverType == "ONDEMAND_ROUTER"): addNode = 0 break if (addNode == 1): nodesForNG.add(nodeName) print "Creating node group " + ngName AdminTask.createNodeGroup(ngName) for nd in nodesForNG: AdminTask.addNodeGroupMember(ngName, ["-nodeName", nd])
def handleQuery(self, query, fieldName, formatStr):
    """Run a Solr query restricted to dataset objects, collect distinct
    values of `fieldName` from up to 1000 hits (filtered by self.term as a
    substring when set), and write them to self.writer as a bracketed list
    rendered via formatStr (expects a %(hit)s placeholder). Writes a
    [""]-style body when nothing matches. Always closes the writer.

    NOTE(review): near-duplicate of the other handleQuery in this file."""
    out = ByteArrayOutputStream()
    req = SearchRequest(query)
    # Restrict to dataset objects only.
    req.setParam("fq", 'item_type:"object"')
    req.setParam("fq", 'workflow_id:"dataset"')
    req.setParam("rows", "1000")  # hard cap; hits beyond 1000 are ignored
    self.indexer.search(req, out)
    res = SolrResult(ByteArrayInputStream(out.toByteArray()))
    hits = HashSet()  # de-duplicates values across result documents
    if (res.getNumFound() > 0):
        results = res.getResults()
        for searchRes in results:
            searchResList = searchRes.getList(fieldName)
            if (searchResList.isEmpty()==False):
                for hit in searchResList:
                    if self.term is not None:
                        # substring match (str.find), not a prefix match
                        if hit.find(self.term) != -1:
                            hits.add(hit)
                    else:
                        hits.add(hit)
        self.writer.print("[")
        hitnum = 0
        for hit in hits:
            # comma-separate every entry after the first
            if (hitnum > 0):
                self.writer.print(","+formatStr % {"hit":hit})
            else:
                self.writer.print(formatStr % {"hit":hit})
            hitnum += 1
        self.writer.print("]")
    else:
        self.writer.println("[\"\"]")
    self.writer.close()
def handleGrantNumber(self):
    """Prefix-search dataset objects whose grant_numbers start with
    self.term and write the distinct grant numbers to self.writer as a
    quoted, comma-separated bracketed list (or a [""]-style body when the
    query finds nothing). Always closes the writer."""
    out = ByteArrayOutputStream()
    req = SearchRequest("grant_numbers:%s*" % self.term)
    # Restrict to dataset objects only.
    req.setParam("fq", 'item_type:"object"')
    req.setParam("fq", 'workflow_id:"dataset"')
    req.setParam("rows", "1000")  # hard cap on scanned results
    self.indexer.search(req, out)
    res = SolrResult(ByteArrayInputStream(out.toByteArray()))
    hits = HashSet()  # de-duplicates grant numbers across documents
    if (res.getNumFound() > 0):
        creatorResults = res.getResults()
        for creatorRes in creatorResults:
            creatorList = creatorRes.getList("grant_numbers")
            if (creatorList.isEmpty()==False):
                for hit in creatorList:
                    hits.add(hit)
        self.writer.print("[")
        hitnum = 0
        for hit in hits:
            # comma-separate every entry after the first
            if (hitnum > 0):
                self.writer.print(",\"%s\"" % hit)
            else:
                self.writer.print("\"%s\"" % hit)
            hitnum += 1
        self.writer.print("]")
    else:
        self.writer.println("[\"\"]")
    self.writer.close()
def save_and_get_complex():
    '''Test saving entities containing mapped collection properties'''
    # Build an entity whose fields cover list, set and map collection types.
    original = TestEntities.ComplexEntity()
    original.setId("complex1")
    stringList = ArrayList()
    for word in ("one", "two"):
        stringList.add(word)
    original.setStringList(stringList)
    intSet = HashSet()
    for number in (1, 2):
        intSet.add(number)
    original.setIntSet(intSet)
    extendedProps = HashMap()
    extendedProps.put("prop1", "one")
    extendedProps.put("prop2", "two")
    original.setExtendedProps(extendedProps)
    # Round-trip the entity through the service.
    service = EntityService(TestEntities.ComplexEntity)
    service.save(original)
    loaded = service.get("complex1")
    assertNotNull(loaded)
    assertEquals(loaded.getId(), original.getId())
    assertTrue(loaded.getStringList().contains("one"))
    assertTrue(loaded.getStringList().contains("two"))
    # Integers come back from storage as java.lang.Long values.
    assertTrue(loaded.getIntSet().contains(java.lang.Long(1)))
    assertTrue(loaded.getIntSet().contains(java.lang.Long(2)))
    assertNotNull(loaded.getExtendedProps())
    assertEquals(loaded.getExtendedProps().get("prop1"), "one")
    assertEquals(loaded.getExtendedProps().get("prop2"), "two")
def prepareClientsSet(self, configurationAttributes): clientsSet = HashSet() if (not configurationAttributes.containsKey("allowed_clients")): return clientsSet allowedClientsList = configurationAttributes.get( "allowed_clients").getValue2() if (StringHelper.isEmpty(allowedClientsList)): print "UMA authorization policy. Initialization. The property allowed_clients is empty" return clientsSet allowedClientsListArray = StringHelper.split(allowedClientsList, ",") if (ArrayHelper.isEmpty(allowedClientsListArray)): print "UMA authorization policy. Initialization. There aren't clients specified in allowed_clients property" return clientsSet # Convert to HashSet to quick search i = 0 count = len(allowedClientsListArray) while (i < count): client = allowedClientsListArray[i] clientsSet.add(client) i = i + 1 return clientsSet
def second(extracted): print "getting stats..." dostats = BasicStatisticsOperation(extracted, False) stats = dostats.processData() print "...done." print "drawing charts..." for metric in stats.get(0).getMetrics(): grapher = DrawMMMGraph(stats) metrics = HashSet() metrics.add(metric) grapher.set_metrics(metrics) grapher.setSortXAxis(True) grapher.setLogYAxis(True) grapher.setStripXName("MPI_Send\(\) \[ <message size> = <") grapher.setTitle("NPB3.2.1 mg.A.4 - MPI_Send() Performance: " + metric) grapher.setSeriesType(DrawMMMGraph.TRIALNAME) grapher.setCategoryType(DrawMMMGraph.EVENTNAME) grapher.setValueType(AbstractResult.EXCLUSIVE) grapher.setXAxisLabel("Message Size") grapher.setYAxisLabel("Exclusive " + metric) # grapher.setLogYAxis(True) grapher.processData() print "...done." return
def getBestVariable(self, functionalSentence, functionInfo): """ generated source for method getBestVariable """ # If all the variables that can be set by the functional sentence are in # the varOrdering, we return null. Otherwise, we return one of # those with the largest domain. # The FunctionInfo is sentence-independent, so we need the context # of the sentence (which has variables in it). tuple_ = GdlUtils.getTupleFromSentence(functionalSentence) dependentSlots = functionInfo.getDependentSlots() if len(tuple_) != len(dependentSlots): raise RuntimeException("Mismatched sentence " + functionalSentence + " and constant form " + functionInfo) candidateVars = HashSet() i = 0 while i < len(tuple_): if isinstance(term, (GdlVariable, )) and dependentSlots.get(i) and not self.varOrdering.contains(term) and self.varsToAssign.contains(term): candidateVars.add(term) i += 1 # Now we look at the domains, trying to find the largest bestVar = None bestDomainSize = 0 for var in candidateVars: if domainSize > bestDomainSize: bestVar = var bestDomainSize = domainSize return bestVar
def drawGraph(results, inclusive): print "drawing charts..." for metric in results.get(0).getMetrics(): grapher = DrawGraph(results) metrics = HashSet() metrics.add(metric) grapher.set_metrics(metrics) grapher.setLogYAxis(False) grapher.setShowZero(True) grapher.setTitle(inApp + ": " + inExp + ": " + metric) grapher.setSeriesType(DrawGraph.EVENTNAME) grapher.setUnits(DrawGraph.SECONDS) grapher.setCategoryType(DrawGraph.TRIALNAME) grapher.setXAxisLabel("Trial Date") grapher.setShortenNames(True) if inclusive == True: grapher.setValueType(AbstractResult.INCLUSIVE) grapher.setYAxisLabel("Inclusive " + metric + " (seconds)") else: grapher.setValueType(AbstractResult.EXCLUSIVE) grapher.setYAxisLabel("Exclusive " + metric + " (seconds)") grapher.processData() if inclusive == True: grapher.drawChartToFile(outFile1) else: grapher.drawChartToFile(outFile2) print "...done." return
def solveTurns(self, model): """ generated source for method solveTurns """ # Before we can do anything else, we need a topological ordering on our forms ordering = getTopologicalOrdering(model.getIndependentSentenceForms(), model.getDependencyGraph()) ordering.retainAll(self.formsControlledByFlow) # Let's add function info to the consideration... functionInfoMap = HashMap() for form in constantForms: functionInfoMap.put(form, FunctionInfoImpl.create(form, self.constantChecker)) # First we set the "true" values, then we get the forms controlled by the flow... # Use "init" values trueFlowSentences = HashSet() for form in constantForms: if form.__name__ == self.INIT: for initSentence in constantChecker.getTrueSentences(form): trueFlowSentences.add(trueSentence) # Go through ordering, adding to trueFlowSentences addSentenceForms(ordering, trueFlowSentences, model, functionInfoMap) self.sentencesTrueByTurn.add(trueFlowSentences) while True: # Now we use the "next" values from the previous turn trueFlowSentences = HashSet() for sentence in sentencesPreviouslyTrue: if sentence.__name__ == self.NEXT: trueFlowSentences.add(trueSentence) addSentenceForms(ordering, trueFlowSentences, model, functionInfoMap) # Test if this turn's flow is the same as an earlier one while i < len(self.sentencesTrueByTurn): if prevSet == trueFlowSentences: # Complete the loop self.turnAfterLast = i break i += 1 self.sentencesTrueByTurn.add(trueFlowSentences)
def recordPropositions(self):
    """ generated source for method recordPropositions """
    # Filter the (free-name) `components` collection down to Propositions.
    found = HashSet()
    for comp in components:
        if isinstance(comp, (Proposition, )):
            found.add(comp)
    return found
class CyclicTypeRecorder(object):
    """Hands out increasing ids to types pushed onto a conceptual stack and
    tracks which of the currently-open types were revisited via visit()."""

    def __init__(self):
        self.count = 0             # last id handed out
        self.elements = HashMap()  # open type -> its assigned id
        self.used = HashSet()      # open types that were looked up

    def push(self, t):
        """Register t as open and return its freshly assigned id."""
        self.count += 1
        self.elements[t] = self.count
        return self.count

    def pop(self, t):
        """Close t; drop any 'used' mark it carried."""
        del self.elements[t]
        if t in self.used:
            self.used.remove(t)

    def visit(self, t):
        """Return t's id if t is currently open (marking it used), else None."""
        ident = self.elements.get(t)
        if ident is not None:
            self.used.add(t)
        return ident

    def isUsed(self, t):
        """True when t was visited while open."""
        return t in self.used
def handleWorkflowStep(self):
    """Collect the distinct workflow_step_label values across dataset
    objects and write them to self.writer as a bracketed list of
    {"value": ..., "label": ...} entries (or a [""]-style body when the
    query finds nothing). Always closes the writer."""
    out = ByteArrayOutputStream()
    # [* TO *] matches any document that has the field at all.
    req = SearchRequest("workflow_step_label:[* TO *]" )
    req.setParam("fq", 'item_type:"object"')
    req.setParam("fq", 'workflow_id:"dataset"')
    req.setParam("rows", "1000")  # hard cap on scanned results
    self.indexer.search(req, out)
    res = SolrResult(ByteArrayInputStream(out.toByteArray()))
    hits = HashSet()  # de-duplicates labels across documents
    if (res.getNumFound() > 0):
        recordTypeResults = res.getResults()
        for recordTypeResult in recordTypeResults:
            recordTypeList = recordTypeResult.getList("workflow_step_label")
            if (recordTypeList.isEmpty()==False):
                for hit in recordTypeList:
                    hits.add(hit)
        self.writer.println("[")
        hitnum = 0
        for hit in hits:
            # comma-separate every entry after the first; value == label
            if (hitnum > 0):
                self.writer.println(",{\"value\": \"%s\",\n\"label\": \"%s\"}" % (hit,hit))
            else:
                self.writer.println("{\"value\": \"%s\",\n\"label\": \"%s\"}" % (hit,hit))
            hitnum += 1
        self.writer.println("]")
    else:
        self.writer.println("[\"\"]")
    self.writer.close()
def prepareClientRedirectUris(self, configurationAttributes): clientRedirectUrisSet = HashSet() if not configurationAttributes.containsKey("client_redirect_uris"): return clientRedirectUrisSet clientRedirectUrisList = configurationAttributes.get( "client_redirect_uris").getValue2() if StringHelper.isEmpty(clientRedirectUrisList): print "Casa client registration. The property client_redirect_uris is empty" return clientRedirectUrisSet clientRedirectUrisArray = StringHelper.split(clientRedirectUrisList, ",") if ArrayHelper.isEmpty(clientRedirectUrisArray): print "Casa client registration. No clients specified in client_redirect_uris property" return clientRedirectUrisSet # Convert to HashSet to quick search i = 0 count = len(clientRedirectUrisArray) while i < count: uris = clientRedirectUrisArray[i] clientRedirectUrisSet.add(uris) i = i + 1 return clientRedirectUrisSet
def getNodeObjects(api, filters):
    """Query the NNM Node web service once per filter and build a dictionary
    of UNode objects keyed by node id, de-duplicating nodes seen under
    multiple filters. On exception, logs and reports a framework warning and
    returns whatever was collected so far. Returns the (possibly empty)
    id -> UNode dictionary."""
    found = 0
    ndMap = {}
    ndSet = HashSet()  # node ids already processed (duplicate guard)
    lanSwitchCapability = "com.hp.nnm.capability.node.lanswitching"
    ipForwardingCapability = "com.hp.nnm.capability.node.ipforwarding"
    try:
        ndStub = api.getStub(NnmServicesEnum().Node)
        for filter in filters:
            allNodesArray = ndStub.getNodes(filter)
            allNodes = allNodesArray.getItem()
            if allNodes != None:
                found = 1
                logger.debug("Retrieved %s Node Objects" % (len(allNodes)))
                for i in range(len(allNodes)):
                    # Require the minimal identifying attributes.
                    if (notNull(allNodes[i].getId()) and notNull(allNodes[i].getName())
                            and notNull(allNodes[i].getCreated()) and notNull(allNodes[i].getModified())):
                        ## Don't add duplicate Nodes
                        if ndSet.contains(allNodes[i].getId()):
                            continue
                        else:
                            ndSet.add(allNodes[i].getId())
                        # The capabilities com.hp.nnm.capability.node.lanswitching and
                        # com.hp.nnm.capability.node.ipforwarding have replaced
                        # isLanSwitch and isIPv4Router respectively.
                        isLanSwitch = 0
                        isRouter = 0
                        caps = allNodes[i].getCapabilities()
                        if (notNull(caps)):
                            for cap in caps:
                                key = cap.getKey().strip()
                                if (key == lanSwitchCapability):
                                    isLanSwitch = 1
                                if (key == ipForwardingCapability):
                                    isRouter = 1
                        ndMap[allNodes[i].getId()] = UNode(allNodes[i].getId(), allNodes[i].getName(),
                                isRouter, isLanSwitch, allNodes[i].getSystemName(),
                                allNodes[i].getSystemContact(), allNodes[i].getSystemDescription(),
                                allNodes[i].getSystemLocation(), allNodes[i].getSystemObjectId(),
                                allNodes[i].getLongName(), allNodes[i].getSnmpVersion(),
                                allNodes[i].getDeviceModel(), allNodes[i].getDeviceVendor(),
                                allNodes[i].getDeviceFamily(), allNodes[i].getDeviceDescription(),
                                allNodes[i].getDeviceCategory(), '', '')
            else:
                # NOTE(review): `break` abandons the REMAINING filters as soon
                # as one filter returns no items — confirm this is intended
                # rather than `continue`.
                break
    except:
        stacktrace = traceback.format_exception(sys.exc_info()[0], sys.exc_info()[1], sys.exc_info()[2])
        errMsg = 'Exception:\n %s' % stacktrace
        logger.error(errMsg)
        api.Framework.reportWarning(errMsg)
    if found:
        logger.debug('Created a dictionary of %d Node objects' % (len(ndMap)))
    else:
        errMsg = 'Did not find any Node objects'
        logger.debug(errMsg)
        api.Framework.reportWarning(errMsg)
    return ndMap
def containers():
    """wlp.Server containers from non-NOOP deltas that request a server
    reboot (rebootServer is True)."""
    result = HashSet()
    for change in deltas.deltas:
        target = change.deployedOrPrevious.container
        if change.operation == "NOOP":
            continue
        if target.type == "wlp.Server" and target.rebootServer is True:
            result.add(target)
    return result
def getContext_0(cls, state, roles, moves):
    """ generated source for method getContext_0 """
    # Proof context = the state's sentences plus one does(role, move)
    # sentence per role, pairing roles and moves positionally.
    context = HashSet(state.getContents())
    # for-loop over the index replaces the manual while/increment loop
    for i in range(len(roles)):
        context.add(toDoes(roles.get(i), moves.get(i)))
    return context
def getScripts(self, menu):
    """Return a HashSet of script names (stringified) registered for `menu`;
    an empty set when the menu has no entries."""
    from java.util import HashSet
    scriptList = HashSet()
    # `in` replaces the deprecated dict.has_key()
    if menu in self.menuToProcMap:
        menuItems = self.menuToProcMap[menu]
        for item in menuItems:
            scriptList.add(str(item))
    return scriptList
def getUncachedGameKeys(self):
    """ generated source for method getUncachedGameKeys """
    # One key per .kif file under games/test, with the extension stripped.
    # (game.__name__ is the translated accessor for File.getName().)
    theKeys = HashSet()
    for game in File("games/test").listFiles():
        if game.__name__.endsWith(".kif"):
            theKeys.add(game.__name__.replace(".kif", ""))
    return theKeys
def containers():
    """All iib.Server containers referenced by non-NOOP deltas."""
    matched = HashSet()
    for change in deltas.deltas:
        box = change.deployedOrPrevious.container
        if change.operation != "NOOP" and box.type == "iib.Server":
            matched.add(box)
    return matched
def containers():
    """Containers (wls.Server / wls.Cluster) that host SOA composite
    deployables in the current specification. Note: the delta operation is
    deliberately not checked here."""
    result = HashSet()
    for change in specification.deltas:
        deployable = change.deployedOrPrevious
        host = deployable.container
        isComposite = deployable.type in ("soa.CompositeSOADeployable", "soa.Composite")
        if isComposite and host.type in ("wls.Server", 'wls.Cluster'):
            result.add(host)
    return result
def __toJavaSetInternal(val):
    ''' Does the actual conversion of the elements inside of the set or frozenset to Set '''
    javaSet = HashSet()
    for element in val:
        javaSet.add(JUtil.pyValToJavaObj(element))
    return javaSet
def getModelFromPoints(sourcePoints, targetPoints):
    """Fit and return a RigidModel2D mapping sourcePoints onto targetPoints.
    Points are paired positionally; each (x, y) pair becomes a PointMatch."""
    model = RigidModel2D()
    matches = HashSet()
    for src, dst in zip(sourcePoints, targetPoints):
        matches.add(PointMatch(Point([src[0], src[1]]), Point([dst[0], dst[1]])))
    model.fit(matches)
    return model
def getStateFromBase(self):
    """ generated source for method getStateFromBase """
    # Refresh every base proposition from its single input, then collect the
    # names of the ones that are true into a MachineState.
    trueProps = HashSet()
    for base in propNet.getBasePropositions().values():
        base.setValue(base.getSingleInput().getValue())
        if base.getValue():
            trueProps.add(base.__name__)
    return MachineState(trueProps)
def containers():
    """example.Server containers from deltas whose operation is not NOOP."""
    servers = HashSet()
    for change in deltas.deltas:
        if change.operation == "NOOP":
            continue
        candidate = change.deployedOrPrevious.container
        if candidate.type == "example.Server":
            servers.add(candidate)
    return servers
def deployeds():
    """soa.ExtensionLibrary deployeds from non-NOOP deltas that target a SOA
    server or SOA cluster container."""
    libraries = HashSet()
    for change in deltas.deltas:
        artifact = change.deployedOrPrevious
        host = artifact.container
        if change.operation == "NOOP":
            continue
        if artifact.type == "soa.ExtensionLibrary" and host.type in ("wls.SOAServer", 'wls.SOACluster'):
            libraries.add(artifact)
    return libraries
def glue():
    """Long-run analysis script: merges two 64-process gtc_bench trials
    (carrying different metrics), extracts the "Iteration" phases, prints
    the inclusive values for measurement events, runs a correlation, then
    draws one inclusive log-scale graph per metric over the Iteration
    events. Note: result2/result3 are loaded but unused below — presumably
    leftovers; trial2 is fed to the merger directly."""
    print "doing long run test"
    # load the trial
    Utilities.setSession("perfdmf_test")
    trial1 = Utilities.getTrial("gtc_bench", "jaguar.longrun", "64.first")
    result1 = TrialMeanResult(trial1)
    trial2 = Utilities.getTrial("gtc_bench", "jaguar.longrun", "64.second")
    result2 = TrialMeanResult(trial2)
    trial3 = Utilities.getTrial("gtc_bench", "jaguar.longrun", "64.third")
    result3 = TrialMeanResult(trial3)
    # merge the trials together - they have different metrics
    merger = MergeTrialsOperation(result1)
    merger.addInput(trial2)
    # merger.addInput(trial3)
    merged = merger.processData()
    # extract the interval events
    reducer = ExtractPhasesOperation(merged.get(0), "Iteration")
    reduceds = reducer.processData()
    reduced = reduceds.get(0)
    for event in reduced.getEvents():
        for metric in reduced.getMetrics():
            for thread in reduced.getThreads():
                if event.find("measurement") >= 0:
                    print metric, thread, reduced.getInclusive(thread, event, metric)
    # do the correlation
    correlation = CorrelationOperation(reduced)
    outputs = correlation.processData()
    result = outputs.get(0)
    # type = AbstractResult.INCLUSIVE;
    # for event in result.getEvents():
    #     for metric in result.getMetrics():
    #         for thread in result.getThreads():
    #             if event.find("INCLUSIVE") >= 0:
    #                 print event, CorrelationResult.typeToString(thread), metric, ":", AbstractResult.typeToString(type), result.getDataPoint(thread, event, metric, type)
    # Collect the Iteration events for graphing.
    events = ArrayList()
    for event in merged.get(0).getEvents():
        if event.find("Iteration") >= 0:
            events.add(event)
    extractor = ExtractEventOperation(merged.get(0), events)
    extracted = extractor.processData().get(0)
    for metric in extracted.getMetrics():
        grapher = DrawGraph(extracted)
        metrics = HashSet()
        metrics.add(metric)
        grapher.set_metrics(metrics)
        grapher.setCategoryType(DrawGraph.EVENTNAME)
        grapher.setValueType(AbstractResult.INCLUSIVE)
        grapher.setLogYAxis(True)
        grapher.processData()
def get(self, sentence, varRenamedSentence):
    """ generated source for method get """
    # Look up cached answers under the variable-renamed form of the query,
    # then unify each cached answer back against the caller's sentence.
    # Returns None on a cache miss, else an ArrayList of distinct results.
    cacheContents = self.contents.get(varRenamedSentence)
    if cacheContents is None:  # identity test replaces `== None`
        return None
    results = HashSet()
    for answer in cacheContents:
        results.add(Unifier.unify(sentence, answer))
    return ArrayList(results)
def addQualifier(self, type, value):
    """Register `value` under qualifier `type`, creating the backing HashSet
    on first use. No-op when either argument is falsy.

    NOTE: the parameter name `type` shadows the builtin; kept unchanged for
    keyword-call compatibility."""
    if type and value:
        # `in` replaces the deprecated dict.has_key()
        if type in self.typeToQualifiers:
            qualifiers = self.typeToQualifiers[type]
        else:
            qualifiers = HashSet()
            self.typeToQualifiers[type] = qualifiers
        qualifiers.add(value)
def containers():
    """All wlp.Server containers referenced by the deltas (regardless of
    operation); each addition is logged."""
    toUpdate = HashSet()
    for change in deltas.deltas:
        host = change.deployedOrPrevious.container
        if host.type == "wlp.Server":
            logger.info("adding container: %s to the containers to update their features" % host)
            toUpdate.add(host)
    return toUpdate
def containers():
    """openshift.Server containers needing the client installed: either the
    server container itself, or the server behind an openshift.ProjectModule.
    Only non-NOOP deltas are considered."""
    result = HashSet()
    for change in deltas.deltas:
        if change.operation == "NOOP":
            continue
        unit = change.deployedOrPrevious.container
        if unit.type == "openshift.Server" and unit.installClient:
            result.add(unit)
        elif unit.type == "openshift.ProjectModule" and unit.server.installClient:
            result.add(unit.server)
    return result
def containers():
    """jbossdm containers (Domain / StandaloneServer / Profile) hosting a
    non-NOOP deployed that carries restartContainer = true."""
    toRestart = HashSet()
    for change in deltas.deltas:
        unit = change.deployedOrPrevious
        host = unit.container
        if not unit.hasProperty('restartContainer'):
            continue
        wantsRestart = unit.restartContainer
        if wantsRestart and change.operation != "NOOP" and host.type in ["jbossdm.Domain", "jbossdm.StandaloneServer", "jbossdm.Profile"]:
            toRestart.add(host)
    return toRestart
def getUncachedGameKeys(self):
    """ generated source for method getUncachedGameKeys """
    # Collect game key strings from a JSON array into a HashSet; any failure
    # is printed and the (possibly partial) set is returned.
    theGameKeys = HashSet()
    try:
        # NOTE(review): `i` and `theArray` are never defined in this
        # translation — the Java original presumably initialised i = 0 and
        # fetched theArray (a JSONArray) first. As written this raises
        # NameError, caught and printed below. TODO: restore from source.
        while i < len(theArray):
            theGameKeys.add(theArray.getString(i))
            i += 1
    except Exception as e:
        e.printStackTrace()
    return theGameKeys
def unique_supported_deployables():
    """Distinct deployables that expose a checksum attribute and whose delta
    operation is CREATE or MODIFY."""
    unique = HashSet()
    for change in deltas.deltas:
        artifact = change.deployedOrPrevious
        if not hasattr(artifact.deployable, "checksum"):
            continue
        if change.operation in ("CREATE", "MODIFY"):
            unique.add(artifact.deployable)
    return unique
def dynamicClusters():
    """was.DynamicCluster members gathered from both the current and the
    previous deployed application's environment (either may be absent)."""
    clusters = HashSet()
    for app in (deployedApplication, previousDeployedApplication):
        if app:
            for member in app.environment.members:
                if member.type == "was.DynamicCluster":
                    clusters.add(member)
    return clusters
def ask(self, query, context, askOne):
    """ generated source for method ask """
    # Seed the goal list with the query, prove it against a knowledge base
    # built from `context`, then substitute each answer binding back into
    # the query to produce the result set.
    goals = LinkedList()
    goals.add(query)
    answers = HashSet()
    alreadyAsking = HashSet()
    self.ask(goals, KnowledgeBase(context), Substitution(), ProverCache(),
             VariableRenamer(), askOne, answers, alreadyAsking)
    results = HashSet()
    for binding in answers:
        results.add(Substituter.substitute(query, binding))
    return results
def webcontainers(appname): result = HashSet() print "=====================================================================================" print "In planning.py deployed.name=" for _delta in deltas.deltas: deployed = _delta.deployedOrPrevious print "In planning.py deployed.name=" + deployed.name if appname in deployed.name and (_delta.operation == "CREATE" or _delta.operation == "MODIFY"): current_container = deployed.container result.add(current_container) return result
def sort_deployeds():
    """Group rh.TomcatWARModule deployeds into a HashMap keyed by
    '<appName>___<containerName>'; each value is a HashSet of the deployeds
    sharing that key."""
    grouped = HashMap()
    for change in deltas.deltas:
        module = change.deployedOrPrevious
        if module.type != "rh.TomcatWARModule":
            continue
        key = "%s___%s" % (module.appName, module.container.name)
        bucket = grouped.get(key)
        if bucket is None:
            bucket = HashSet()
            grouped.put(key, bucket)
        bucket.add(module)
    return grouped
def disFreeBSD(host_obj, client, Framework, langBund=None, host_is_virtual = False):
    """Discover CPUs on a FreeBSD host via a shell client and return an
    ObjectStateHolderVector with one CPU OSH per discovered logical CPU.
    Tries sysctl first; falls back to parsing dmesg when sysctl yields
    nothing. langBund and Framework are accepted but not used here."""
    myVec = ObjectStateHolderVector()
    cpuDescription = None
    cpuCount = 0
    cpuSpeed = None
    vendor = ''
    # Primary source: sysctl reports model, logical CPU count and clock rate.
    result = client.execCmd('sysctl hw.model hw.ncpu hw.clockrate')#V@@CMD_PERMISION tty protocol execution
    if result:
        regexStr = 'hw\.model: (.*?)\s*\nhw\.ncpu: (\d+)\s*\nhw\.clockrate: (\d+)'
        matcher = re.search(regexStr, result)
        if matcher:
            cpuDescription = collapseWhitespaces(matcher.group(1))
            cpuCount = int(matcher.group(2))
            cpuSpeed = matcher.group(3)
            if re.search('Intel', cpuDescription):
                vendor = 'Intel'
    if cpuCount == 0:
        # Fallback: parse the boot-time dmesg CPU banner.
        result = client.execCmd('dmesg | grep -A 1 "CPU:"')#V@@CMD_PERMISION tty protocol execution
        if result:
            regexStr = 'CPU:\s*([^\n]*)\((\d+)\.\d+-MHz.*?\)\s*?\n.*?Origin\s*?=\s*?"(.*?)\"'
            matcher = re.search(regexStr, result)
            if matcher:
                cpuDescription = collapseWhitespaces(matcher.group(1))
                cpuSpeed = matcher.group(2)
                vendor = matcher.group(3)
                cpuCount = 1
                # Multiprocessor machines report the CPU count separately.
                cpuCountResult = client.execCmd('dmesg | grep "cpu\|Multiprocessor"')#V@@CMD_PERMISION tty protocol execution
                if cpuCountResult:
                    cpuCountMatcher = re.search("Multiprocessor (System Detected|motherboard): (\d+) CPUs", cpuCountResult)
                    if cpuCountMatcher:
                        cpuCount = int(cpuCountMatcher.group(2))
                    else:
                        cpuEntries = re.findall("(?i)cpu\d+ \(\w+?\):\s+?APIC ID:\s+?\d+", cpuCountResult)
                        if cpuEntries:
                            #filter out duplicates from multiple sessions
                            set_ = HashSet()
                            for entry in cpuEntries:
                                set_.add(entry)
                            cpuCount = set_.size()
    if cpuCount > 0:
        # One OSH per logical CPU; id is CPU0, CPU1, ...
        for i in range(cpuCount):
            cid = 'CPU' + str(i)
            myVec.add(makeCPUOSH(host_obj, cid, vendor, cpuSpeed, '', cpuDescription, is_virtual=host_is_virtual))
    return myVec
def containers():
    """Containers to restart: jbossdm Domain/StandaloneServer/Profile hosts
    of non-NOOP deployeds that set restartContainer."""
    result = HashSet()
    restartTypes = ["jbossdm.Domain", "jbossdm.StandaloneServer", "jbossdm.Profile"]
    for change in deltas.deltas:
        item = change.deployedOrPrevious
        host = item.container
        if item.hasProperty('restartContainer'):
            if item.restartContainer and change.operation != "NOOP" and host.type in restartTypes:
                result.add(host)
    return result
def completeComponentSet(cls, componentSet):
    """ generated source for method completeComponentSet """
    # Transitive closure over component wiring: repeatedly pull in every
    # input/output neighbour of the current frontier until nothing new is
    # discovered. Mutates componentSet in place.
    frontier = HashSet(componentSet)
    while not frontier.isEmpty():
        discovered = HashSet()
        for comp in frontier:
            for out in comp.getOutputs():
                if not componentSet.contains(out):
                    discovered.add(out)
            for inp in comp.getInputs():
                if not componentSet.contains(inp):
                    discovered.add(inp)
        componentSet.addAll(discovered)
        frontier = discovered
def mergeBaseRelations(self, rels):
    """ generated source for method mergeBaseRelations """
    # Group base relations and rebuild merged relations whose argument
    # positions carry (val ...) functions over the merged value sets.
    merges = HashMap()
    for rel in rels:
        # NOTE(review): `name` and `merge` are never assigned in this
        # translation — the Java original presumably derived them from `rel`
        # and from merges.get(name). As written these raise NameError.
        # TODO: restore from the Java source before relying on this method.
        if not merges.containsKey(name):
            merges.put(name, ArrayList())
        addRelToMerge(rel, merge)
    rval = HashSet()
    valConst = GdlPool.getConstant("val")
    for c in merges.keySet():
        # NOTE(review): `body`, `ms2`, and `toAdd` are likewise undefined
        # here — per-merge sentence construction was lost in translation.
        body.add(c)
        for mergeSet in merge:
            Collections.sort(ms2, SortTerms())
            body.add(GdlPool.getFunction(valConst, ms2))
        rval.add(toAdd)
    return rval
def run(self):
    """ generated source for method run """
    try:
        # Sleep for the first two seconds after which the cache is loaded,
        # so that we don't interfere with the user interface startup.
        Thread.sleep(2000)
    except InterruptedException as e:
        e.printStackTrace()
        return
    remoteRepository = RemoteGameRepository(self.theRepoURL)
    print "Updating the game cache..."
    beginTime = System.currentTimeMillis()
    # Since games are immutable, we can guarantee that the games listed
    # by the repository server includes the games in the local cache, so
    # we can be happy just updating/refreshing the listed games.
    theGameKeys = remoteRepository.getGameKeys()
    if theGameKeys == None:
        return
    # If the server offers a single combined metadata file, download that
    # and use it to avoid checking games that haven't gotten new versions.
    bundledMetadata = remoteRepository.getBundledMetadata()
    if bundledMetadata != None:
        # NOTE(review): `unchangedKeys`, `myGameVersion` and
        # `remoteVersionedGameURL` are never assigned in this translation —
        # the Java original presumably derived them per key from the bundled
        # metadata. As written this raises NameError inside the try and
        # silently `continue`s. TODO: restore from the Java source.
        for theKey in theGameKeys:
            try:
                if myGameVersion == None:
                    continue
                # Skip updating the game cache entry if the version is the same
                # and the cache entry was written less than a week ago.
                if myGameVersion.getRepositoryURL() == remoteVersionedGameURL and getCacheEntryAge(theKey) < 604800000:
                    unchangedKeys.add(theKey)
            except Exception as e:
                continue
        theGameKeys.removeAll(unchangedKeys)
    # Start threads to update every entry in the cache (or at least verify
    # that the entry doesn't need to be updated).
    theThreads = HashSet()
    for gameKey in theGameKeys:
        # NOTE(review): `t` is never constructed — the per-key refresh
        # Thread creation was lost in translation.
        t.start()
        theThreads.add(t)
    # Wait until we've updated the cache before continuing.
    for t in theThreads:
        try:
            t.join()
        except InterruptedException as e:
            # NOTE(review): per the original line layout, the timing code
            # below appears to sit in this except suite — meaning `endTime`
            # is only set when a join is interrupted, and the final print
            # would NameError on the normal path. This looks like an
            # indentation bug in the translation; TODO confirm against the
            # Java source (where the catch block was empty).
            endTime = System.currentTimeMillis()
    print "Updating the game cache took: " + (endTime - beginTime) + "ms."
def getUserByExternalUid(self, uid, provider, userService): newFormat = "passport-%s:%s:%s" % ("saml", provider, uid) user = userService.getUserByAttribute("oxExternalUid", newFormat) if user == None: oldFormat = "passport-%s:%s" % ("saml", uid) user = userService.getUserByAttribute("oxExternalUid", oldFormat) if user != None: # Migrate to newer format list = HashSet(user.getAttributeValues("oxExternalUid")) list.remove(oldFormat) list.add(newFormat) user.setAttribute("oxExternalUid", ArrayList(list)) print "Migrating user's oxExternalUid to newer format 'passport-saml:provider:uid'" userService.updateUser(user) return user
def getBusinessServiceURI(ref, configMBean):
    """Return the SERVICE_URI env value configured for business service
    `ref`, or the empty string when none is found. If several match, the
    last one wins."""
    uriTypes = HashSet()
    uriTypes.add(EnvValueTypes.SERVICE_URI)
    query = EnvValueQuery(None, uriTypes, Collections.singleton(ref), False, None, False)
    uri = ""
    for envValue in configMBean.findEnvValues(query):
        if envValue.getEnvValueType() == EnvValueTypes.SERVICE_URI:
            uri = envValue.getValue()
    return uri
def getRequestedPortsSet(self):
    """Resolve the 'P2PServerPorts' framework parameter (comma-separated
    service names or literal port numbers) into a HashSet of port numbers.
    Returns None when the parameter is unset, empty, or '*' (meaning all
    ports). Unresolvable tokens are logged and skipped."""
    services = self.Framework.getParameter('P2PServerPorts')
    if logger.isDebugEnabled():
        logger.debug('Requested services:', services)
    # identity test replaces `== None`
    if (services is None) or (len(services) == 0) or (services == '*'):
        return None
    portsSet = HashSet()
    for name in services.split(','):
        portNums = self.knownPortsConfigFile.getPortByName(name)
        if portNums is None:
            try:
                # Not a known service name; treat the token as a literal port.
                portNums = [Integer.parseInt(name)]
            except:
                logger.debug('Failed to resolve service port number:', name)
                continue
        for portNum in portNums:
            portsSet.add(portNum)
    return portsSet
def getIPAddressObjects(api, filters):
    """Query the NNM IPAddress web service once per filter and build a
    dictionary of UIp objects keyed by IP id, de-duplicating across filters.
    On exception, or when nothing is found, an error is reported to the
    framework. Returns the (possibly empty) id -> UIp dictionary."""
    found = 0
    ipMap = {}
    ipSet = HashSet()  # IP ids already processed (duplicate guard)
    try:
        ipStub = api.getStub(NnmServicesEnum().IPAddress)
        for filter in filters:
            allIpsArray = ipStub.getIPAddresses(filter)
            allIps = allIpsArray.getItem()
            if allIps != None:
                found = 1
                logger.debug("Retrieved %s IPAddress Objects" % (len(allIps)))
                for i in range(len(allIps)):
                    # Require the minimal identifying attributes.
                    if (notNull(allIps[i].getId()) and notNull(allIps[i].getHostedOnId())
                            and notNull(allIps[i].getIpValue()) and notNull(allIps[i].getCreated())
                            and notNull(allIps[i].getModified())):
                        ## Don't add duplicate IPs
                        if ipSet.contains(allIps[i].getId()):
                            logger.debug("########Found duplicate IP" + allIps[i].getIpValue())
                            continue
                        else:
                            ipSet.add(allIps[i].getId())
                        ipMap[allIps[i].getId()] = UIp(allIps[i].getId(), allIps[i].getHostedOnId(),
                                allIps[i].getIpSubnetId(), allIps[i].getInInterfaceId(),
                                allIps[i].getIpValue(), allIps[i].getPrefixLength(),
                                allIps[i].getCreated(), allIps[i].getModified())
            else:
                # NOTE(review): `break` abandons the REMAINING filters when one
                # filter returns no items — confirm this is intended rather
                # than `continue`.
                break
    except:
        stacktrace = traceback.format_exception(sys.exc_info()[0], sys.exc_info()[1], sys.exc_info()[2])
        errMsg = 'Exception:\n %s' % stacktrace
        logger.error(errMsg)
        api.Framework.reportError(errMsg)
    if found:
        logger.debug('Created a dictionary of %d IPAddress objects' % (len(ipMap)))
    else:
        errMsg = 'Did not find any IPAddress objects'
        logger.debug(errMsg)
        api.Framework.reportError(errMsg)
    return ipMap
def getAvailMethodsUser(self, user, skip=None): methods = HashSet() for method in self.authenticators: try: module = self.authenticators[method] if module.hasEnrollments(module.configAttrs, user): methods.add(method) except: print "Casa. getAvailMethodsUser. hasEnrollments call could not be issued for %s module" % method try: if skip != None: # skip is guaranteed to be a member of methods (if hasEnrollments routines are properly implemented). # A call to remove strangely crashes when skip is absent methods.remove(skip) except: print "Casa. getAvailMethodsUser. methods list does not contain %s" % skip print "Casa. getAvailMethodsUser %s" % methods.toString() return methods
def getKnownListeningPortsSet(self):
    """Resolve the 'knownListeningPorts' framework parameter (comma-separated
    service names or literal port numbers) into a HashSet of port numbers.
    Returns None when the parameter is unset or empty; a set containing the
    single entry '*' when the parameter is the wildcard. Unresolvable tokens
    are logged and skipped."""
    ports = self.Framework.getParameter('knownListeningPorts')
    portsSet = HashSet()
    if logger.isDebugEnabled():
        logger.debug('Known Listening Ports:', ports)
    # identity test replaces `== None`
    if (ports is None) or (len(ports) == 0):
        return None
    if (ports == '*'):
        portsSet.add('*')
        return portsSet
    for name in ports.split(','):
        portNums = self.knownPortsConfigFile.getPortByName(name)
        if portNums is None:
            try:
                # Not a known service name; treat the token as a literal port.
                portNums = [Integer.parseInt(name)]
            except:
                logger.debug('Failed to resolve service port number:', name)
                continue
        for portNum in portNums:
            portsSet.add(portNum)
    return portsSet
def buildClassifier(results):
    """Train a CQoS naive-Bayes classifier over `results`, predicting
    'scf type' from the listed metadata fields using 'Time' as the
    performance measure, and persist it to `fileName` (module global).
    Commented-out lines are deliberate alternates (extra fields, accuracy
    target, SVM classifier) — keep them for reference."""
    print "building classifier..."
    metadataFields = HashSet()
    metadataFields.add("molecule name")
    metadataFields.add("basis set")
    #metadataFields.add("run type")
    metadataFields.add("scf type")
    metadataFields.add("node count")
    metadataFields.add("core count")
    # metadataFields.add("CPU MHz")  # i.e. 1995.002
    # metadataFields.add("CPU Cores")  # i.e. 2
    # metadataFields.add("OS Machine")  # i.e. Linux
    # metadataFields.add("Cache Size")  # i.e. 4096 KB
    # for performance
    classifier = CQoSClassifierOperation(results, "Time", metadataFields, "scf type")
    # for accuracy
    # classifier = CQoSClassifierOperation(results, "accuracy", metadataFields, "basis set")
    classifier.setClassifierType(CQoSClassifierOperation.NAIVE_BAYES)
    #classifier.setClassifierType(CQoSClassifierOperation.SUPPORT_VECTOR_MACHINE)
    classifier.processData()
    classifier.writeClassifier(fileName)
    print "...done."
def exportAll():
    """Export the OSB/ALSB configuration to `exportJar`: the whole domain
    (optionally encrypted with `passphrase`) when `project` is the string
    "None", otherwise just that project. When `customFile` is set, also
    writes a sample find-and-replace customization XML for business-service
    URIs. Uses module globals: project, passphrase, exportJar, customFile.
    Note the WLST-style lowercase `true`/`false` literals and the string
    comparison against "None" (not the None singleton)."""
    try:
        ALSBConfigurationMBean = findService(
            "ALSBConfiguration",
            "com.bea.wli.sb.management.configuration.ALSBConfigurationMBean")
        print "ALSBConfiguration MBean found"
        print project
        if project == "None":
            ref = Ref.DOMAIN
            collection = Collections.singleton(ref)
            if passphrase == None:
                print "Export the config"
                theBytes = ALSBConfigurationMBean.export(
                    collection, true, None)
            else:
                print "Export and encrypt the config"
                theBytes = ALSBConfigurationMBean.export(
                    collection, true, passphrase)
        else:
            ref = Ref.makeProjectRef(project)
            print "Export the project", project
            collection = Collections.singleton(ref)
            theBytes = ALSBConfigurationMBean.exportProjects(
                collection, passphrase)
        aFile = File(exportJar)
        out = FileOutputStream(aFile)
        out.write(theBytes)
        out.close()
        print "ALSB Configuration file: " + exportJar + " has been exported"
        if customFile != "None":
            print collection
            # see com.bea.wli.sb.util.EnvValueTypes in sb-kernel-api.jar for the values
            #EnvValueQuery evquery =
            #    new EnvValueQuery(
            #        null,       // search across all resource types
            #        Collections.singleton(EnvValueTypes.URI_ENV_VALUE_TYPE), // search only the URIs
            #        null,       // search across all projects and folders.
            #        true,       // only search across resources that are
            #                    // actually modified/imported in this session
            #        "localhost", // the string we want to replace
            #        false       // not a complete match of URI. any URI
            #                    // that has "localhost" as substring will match
            #    );
            refTypes = HashSet()
            refTypes.add(EnvValueTypes.SERVICE_URI_TABLE)
            refTypes.add(EnvValueTypes.SERVICE_URI)
            query = EnvValueQuery(
                Collections.singleton(Refs.BUSINESS_SERVICE_TYPE), refTypes,
                collection, false, "search string", false)
            # query = EnvValueQuery(None, Collections.singleton(EnvValueTypes.SERVICE_URI_TABLE), collection, false, "search string", false)
            customEnv = FindAndReplaceCustomization('new endpoint url', query, 'replace string')
            # object = QualifiedEnvValue(Refs.makeBusinessSvcRef(ref,'file'), Refs.BUSINESS_SERVICE_TYPE, "XSDvalidation/file", "aaa")
            # objects = ArrayList()
            # objects.add(object)
            # customEnv2 = EnvValueCustomization('Set the right endpoints', objects)
            print 'EnvValueCustomization created'
            customList = ArrayList()
            customList.add(customEnv)
            # customList.add(customEnv2)
            print customList
            aFile = File(customFile)
            out = FileOutputStream(aFile)
            Customization.toXML(customList, out)
            out.close()
            print "ALSB Dummy Customization file: " + customFile + " has been created"
    except:
        raise