def handleGrantNumber(self):
    """Write a JSON array of distinct grant numbers matching self.term.

    Queries Solr for dataset objects whose grant_numbers field starts
    with the search term, de-duplicates the values, and emits them to
    self.writer as a JSON string array (or ["\"\""] when nothing matched).
    The writer is closed afterwards.
    """
    buffer = ByteArrayOutputStream()
    request = SearchRequest("grant_numbers:%s*" % self.term)
    request.setParam("fq", 'item_type:"object"')
    request.setParam("fq", 'workflow_id:"dataset"')
    request.setParam("rows", "1000")
    self.indexer.search(request, buffer)
    result = SolrResult(ByteArrayInputStream(buffer.toByteArray()))
    uniqueHits = HashSet()
    if result.getNumFound() > 0:
        for doc in result.getResults():
            grantNumbers = doc.getList("grant_numbers")
            if not grantNumbers.isEmpty():
                for value in grantNumbers:
                    uniqueHits.add(value)
        self.writer.print("[")
        first = True
        for value in uniqueHits:
            if first:
                self.writer.print("\"%s\"" % value)
                first = False
            else:
                # comma-prefix every entry after the first
                self.writer.print(",\"%s\"" % value)
        self.writer.print("]")
    else:
        self.writer.println("[\"\"]")
    self.writer.close()
def createNG(): exist = doesNodeGroupExist(ngName) if (exist == "true"): print "Node group " + ngName + " already exists. Skipping its creation..." return nodes = convertToList(AdminConfig.list("Node")) nodesForNG = HashSet() for node in nodes: nodeName = AdminConfig.showAttribute(node, "name") nodeagentid = AdminConfig.getid("/Node:" + nodeName + "/Server:nodeagent/") if (nodeagentid != None and nodeagentid != ""): nodeservers = convertToList(AdminConfig.list("Server", node)) addNode = 1 for server in nodeservers: serverType = AdminConfig.showAttribute(server, "serverType") if (serverType == "ONDEMAND_ROUTER"): addNode = 0 break if (addNode == 1): nodesForNG.add(nodeName) print "Creating node group " + ngName AdminTask.createNodeGroup(ngName) for nd in nodesForNG: AdminTask.addNodeGroupMember(ngName, ["-nodeName", nd])
def second(extracted): print "getting stats..." dostats = BasicStatisticsOperation(extracted, False) stats = dostats.processData() print "...done." print "drawing charts..." for metric in stats.get(0).getMetrics(): grapher = DrawMMMGraph(stats) metrics = HashSet() metrics.add(metric) grapher.set_metrics(metrics) grapher.setSortXAxis(True) grapher.setLogYAxis(True) grapher.setStripXName("MPI_Send\(\) \[ <message size> = <") grapher.setTitle("NPB3.2.1 mg.A.4 - MPI_Send() Performance: " + metric) grapher.setSeriesType(DrawMMMGraph.TRIALNAME) grapher.setCategoryType(DrawMMMGraph.EVENTNAME) grapher.setValueType(AbstractResult.EXCLUSIVE) grapher.setXAxisLabel("Message Size") grapher.setYAxisLabel("Exclusive " + metric) # grapher.setLogYAxis(True) grapher.processData() print "...done." return
def handleQuery(self, query, fieldName, formatStr):
    """Run a Solr query over dataset objects and write the distinct values
    of fieldName to self.writer as a JSON array.

    Each value is rendered with formatStr, a %-format string using the
    "hit" key. When self.term is set, only values containing the term are
    kept. Writes ["\"\""] when the query finds nothing; closes the writer.
    """
    buffer = ByteArrayOutputStream()
    request = SearchRequest(query)
    request.setParam("fq", 'item_type:"object"')
    request.setParam("fq", 'workflow_id:"dataset"')
    request.setParam("rows", "1000")
    self.indexer.search(request, buffer)
    result = SolrResult(ByteArrayInputStream(buffer.toByteArray()))
    uniqueHits = HashSet()
    if result.getNumFound() > 0:
        for doc in result.getResults():
            values = doc.getList(fieldName)
            if not values.isEmpty():
                for value in values:
                    # Optional substring filter on the current search term.
                    if self.term is None or value.find(self.term) != -1:
                        uniqueHits.add(value)
        self.writer.print("[")
        first = True
        for value in uniqueHits:
            rendered = formatStr % {"hit": value}
            if first:
                self.writer.print(rendered)
                first = False
            else:
                self.writer.print("," + rendered)
        self.writer.print("]")
    else:
        self.writer.println("[\"\"]")
    self.writer.close()
def updateDomains(self):
    """ generated source for method updateDomains

    Iteratively intersects/propagates value domains until a fixed point
    (no domain gains new values during a full sweep).

    NOTE(review): this is machine-translated code that references several
    names never defined in this method or visible in this file
    (`domains`, `domain`, `name`, `newLoc`, `otherDom`, `before`). As
    written it would raise NameError at runtime; consult the generating
    Java source before relying on or modifying this method.
    """
    changedSomething = True
    # Sweep until a full pass makes no changes (fixed point).
    while changedSomething:
        changedSomething = False
        for d in domains.values():
            # Intersect the domains of all function references of d.
            for intSet in d.functionRefs:
                for d2 in intSet:
                    if d2 != None:
                        if domain == None:  # NOTE(review): `domain` is never initialised
                            domain = HashSet(d2.values)
                        else:
                            domain.retainAll(d2.values)
            if domain != None:
                d.values.addAll(domain)
            if d.loc != None:
                # does -> legal, true -> next: pull values from the paired form.
                if name == "does":  # NOTE(review): `name`/`newLoc`/`otherDom` undefined
                    newLoc.name = GdlPool.getConstant("legal")
                    newLoc.idx = d.loc.idx
                    if otherDom == None:
                        raise RuntimeException("Uh oh, missed a legal")
                    d.values.addAll(otherDom.values)
                elif name == "true":
                    newLoc.name = GdlPool.getConstant("next")
                    newLoc.idx = d.loc.idx
                    if otherDom == None:
                        raise RuntimeException("Uh oh, missed a next")
                    d.values.addAll(otherDom.values)
            # NOTE(review): `before` undefined — presumably len(d.values) snapshot.
            if len(d.values) != before:
                changedSomething = True
def prepareClientRedirectUris(self, configurationAttributes): clientRedirectUrisSet = HashSet() if not configurationAttributes.containsKey("client_redirect_uris"): return clientRedirectUrisSet clientRedirectUrisList = configurationAttributes.get( "client_redirect_uris").getValue2() if StringHelper.isEmpty(clientRedirectUrisList): print "Casa client registration. The property client_redirect_uris is empty" return clientRedirectUrisSet clientRedirectUrisArray = StringHelper.split(clientRedirectUrisList, ",") if ArrayHelper.isEmpty(clientRedirectUrisArray): print "Casa client registration. No clients specified in client_redirect_uris property" return clientRedirectUrisSet # Convert to HashSet to quick search i = 0 count = len(clientRedirectUrisArray) while i < count: uris = clientRedirectUrisArray[i] clientRedirectUrisSet.add(uris) i = i + 1 return clientRedirectUrisSet
def recordPropositions(self):
    """Collect and return every Proposition found in `components`."""
    found = HashSet()
    for candidate in components:
        if isinstance(candidate, (Proposition, )):
            found.add(candidate)
    return found
class CyclicTypeRecorder(object):
    """Assigns increasing ids to types pushed onto a cycle-detection stack
    and remembers which of them were revisited ("used") while pushed."""

    def __init__(self):
        self.count = 0             # monotonically increasing id counter
        self.elements = HashMap()  # type -> id while the type is pushed
        self.used = HashSet()      # types revisited via visit()

    def push(self, t):
        """Register t and return its freshly assigned id."""
        self.count += 1
        self.elements[t] = self.count
        return self.count

    def pop(self, t):
        """Forget t, clearing its used-flag if set."""
        del self.elements[t]
        if t in self.used:
            self.used.remove(t)

    def visit(self, t):
        """Return t's id if currently pushed (marking it used), else None."""
        i = self.elements.get(t)
        if i is None:
            return i
        self.used.add(t)
        return i

    def isUsed(self, t):
        """True when t was revisited while pushed."""
        return t in self.used
def getFeatures(imPath, p):
    """Open the image at imPath, extract SIFT features using parameters p,
    log the count, close the image, and return the feature set."""
    featureSet = HashSet()
    image = IJ.openImage(imPath)
    SIFT(FloatArray2DSIFT(p)).extractFeatures(image.getProcessor(), featureSet)
    IJ.log(str(featureSet.size()) + ' features extracted')
    image.close()
    return featureSet
def prepareClientsSet(self, configurationAttributes): clientsSet = HashSet() if (not configurationAttributes.containsKey("allowed_clients")): return clientsSet allowedClientsList = configurationAttributes.get( "allowed_clients").getValue2() if (StringHelper.isEmpty(allowedClientsList)): print "UMA authorization policy. Initialization. The property allowed_clients is empty" return clientsSet allowedClientsListArray = StringHelper.split(allowedClientsList, ",") if (ArrayHelper.isEmpty(allowedClientsListArray)): print "UMA authorization policy. Initialization. There aren't clients specified in allowed_clients property" return clientsSet # Convert to HashSet to quick search i = 0 count = len(allowedClientsListArray) while (i < count): client = allowedClientsListArray[i] clientsSet.add(client) i = i + 1 return clientsSet
def getProcessesToFilter(self):
    """Populate self.ignoredProcesses (lower-cased names) from the
    comma-separated 'filterP2PProcessesByName' framework parameter.
    Leaves the attribute untouched when the parameter is absent."""
    namesParam = self.Framework.getParameter('filterP2PProcessesByName')
    if namesParam is None:
        return
    self.ignoredProcesses = HashSet()
    for name in namesParam.split(','):
        if name:  # skip empty fragments from stray commas
            self.ignoredProcesses.add(name.lower())
def __init__(self, client_addr, server_addr, operations_map, is_simulator=False):
    """Set up the interactive console: merge the mapped operations with the
    built-in auxiliary commands and wire up JLine tab-completion."""
    self.client_addr = client_addr
    self.server_addr = server_addr
    self.is_simulator = is_simulator
    self.operations = dict(operations_map)
    # Built-in commands, displayed alongside the mapped operations.
    self.operations["help"] = "help <optional command>"
    self.operations["login"] = "******"
    self.operations["logout"] = "logout"
    self.operations["echo"] = "echo text"
    self.operations["exit"] = "exit"
    self.aux_commands = {"help", "login", "logout", "echo", "exit"}
    self.op_commands = set(operations_map.keySet())
    self.all_commands = set(self.op_commands)
    self.all_commands.update(self.aux_commands)
    # JLine's StringsCompleter wants a Java collection.
    completions = HashSet()
    for command in self.all_commands:
        completions.add(command)
    try:
        self.console = ConsoleReader()
        self.console.addCompleter(StringsCompleter(completions))
        self.console.setPrompt("prompt> ")
    except IOException as err:
        err.printStackTrace()
def handleWorkflowStep(self):
    """Write a JSON array of {"value","label"} objects for every distinct
    workflow_step_label on dataset objects, or ["\"\""] when none exist.
    Output is line-oriented via self.writer, which is then closed."""
    buffer = ByteArrayOutputStream()
    request = SearchRequest("workflow_step_label:[* TO *]")
    request.setParam("fq", 'item_type:"object"')
    request.setParam("fq", 'workflow_id:"dataset"')
    request.setParam("rows", "1000")
    self.indexer.search(request, buffer)
    result = SolrResult(ByteArrayInputStream(buffer.toByteArray()))
    labels = HashSet()
    if result.getNumFound() > 0:
        for doc in result.getResults():
            docLabels = doc.getList("workflow_step_label")
            if not docLabels.isEmpty():
                for label in docLabels:
                    labels.add(label)
        self.writer.println("[")
        first = True
        for label in labels:
            entry = "{\"value\": \"%s\",\n\"label\": \"%s\"}" % (label, label)
            if first:
                self.writer.println(entry)
                first = False
            else:
                # comma-prefix every entry after the first
                self.writer.println("," + entry)
        self.writer.println("]")
    else:
        self.writer.println("[\"\"]")
    self.writer.close()
def save_and_get_complex():
    '''Test saving entities containing mapped collection properties'''
    original = TestEntities.ComplexEntity()
    original.setId("complex1")
    stringList = ArrayList()
    for s in ("one", "two"):
        stringList.add(s)
    original.setStringList(stringList)
    intSet = HashSet()
    for n in (1, 2):
        intSet.add(n)
    original.setIntSet(intSet)
    props = HashMap()
    props.put("prop1", "one")
    props.put("prop2", "two")
    original.setExtendedProps(props)
    service = EntityService(TestEntities.ComplexEntity)
    service.save(original)
    # Round-trip: reload and verify every collection property survived.
    loaded = service.get("complex1")
    assertNotNull(loaded)
    assertEquals(loaded.getId(), original.getId())
    assertTrue(loaded.getStringList().contains("one"))
    assertTrue(loaded.getStringList().contains("two"))
    # Numeric set members come back from the datastore as java.lang.Long.
    assertTrue(loaded.getIntSet().contains(java.lang.Long(1)))
    assertTrue(loaded.getIntSet().contains(java.lang.Long(2)))
    assertNotNull(loaded.getExtendedProps())
    assertEquals(loaded.getExtendedProps().get("prop1"), "one")
    assertEquals(loaded.getExtendedProps().get("prop2"), "two")
def drawGraph(results, inclusive): print "drawing charts..." for metric in results.get(0).getMetrics(): grapher = DrawGraph(results) metrics = HashSet() metrics.add(metric) grapher.set_metrics(metrics) grapher.setLogYAxis(False) grapher.setShowZero(True) grapher.setTitle(inApp + ": " + inExp + ": " + metric) grapher.setSeriesType(DrawGraph.EVENTNAME) grapher.setUnits(DrawGraph.SECONDS) grapher.setCategoryType(DrawGraph.TRIALNAME) grapher.setXAxisLabel("Trial Date") grapher.setShortenNames(True) if inclusive == True: grapher.setValueType(AbstractResult.INCLUSIVE) grapher.setYAxisLabel("Inclusive " + metric + " (seconds)") else: grapher.setValueType(AbstractResult.EXCLUSIVE) grapher.setYAxisLabel("Exclusive " + metric + " (seconds)") grapher.processData() if inclusive == True: grapher.drawChartToFile(outFile1) else: grapher.drawChartToFile(outFile2) print "...done." return
def getBestVariable(self, functionalSentence, functionInfo): """ generated source for method getBestVariable """ # If all the variables that can be set by the functional sentence are in # the varOrdering, we return null. Otherwise, we return one of # those with the largest domain. # The FunctionInfo is sentence-independent, so we need the context # of the sentence (which has variables in it). tuple_ = GdlUtils.getTupleFromSentence(functionalSentence) dependentSlots = functionInfo.getDependentSlots() if len(tuple_) != len(dependentSlots): raise RuntimeException("Mismatched sentence " + functionalSentence + " and constant form " + functionInfo) candidateVars = HashSet() i = 0 while i < len(tuple_): if isinstance(term, (GdlVariable, )) and dependentSlots.get(i) and not self.varOrdering.contains(term) and self.varsToAssign.contains(term): candidateVars.add(term) i += 1 # Now we look at the domains, trying to find the largest bestVar = None bestDomainSize = 0 for var in candidateVars: if domainSize > bestDomainSize: bestVar = var bestDomainSize = domainSize return bestVar
def getNodeObjects(api, filters):
    """Fetch NNM Node objects for each filter and return {node id: UNode}.

    Duplicate node ids across filters are skipped. Any failure is logged
    and reported as a framework warning (best-effort discovery); the map
    built so far is still returned.
    """
    found = 0                  # did any filter return nodes?
    ndMap = {}                 # node id -> UNode
    ndSet = HashSet()          # ids already added (duplicate guard)
    # These capabilities replaced the legacy isLanSwitch / isIPv4Router flags.
    lanSwitchCapability = "com.hp.nnm.capability.node.lanswitching"
    ipForwardingCapability = "com.hp.nnm.capability.node.ipforwarding"
    try:
        ndStub = api.getStub(NnmServicesEnum().Node)
        for filter in filters:
            allNodesArray = ndStub.getNodes(filter)
            allNodes = allNodesArray.getItem()
            if allNodes != None:
                found = 1
                logger.debug("Retrieved %s Node Objects" % (len(allNodes)))
                for i in range(len(allNodes)):
                    # Require the core identity fields before accepting a node.
                    if (notNull(allNodes[i].getId()) and notNull(allNodes[i].getName()) and notNull(allNodes[i].getCreated()) and notNull(allNodes[i].getModified())):
                        ## Don't add duplicate Nodes
                        if ndSet.contains(allNodes[i].getId()):
                            continue
                        else:
                            ndSet.add(allNodes[i].getId())
                        # The capabilities com.hp.nnm.capability.node.lanswitching and
                        # com.hp.nnm.capability.node.ipforwarding have replaced isLanSwitch and isIPv4Router respectively.
                        isLanSwitch = 0
                        isRouter = 0
                        caps = allNodes[i].getCapabilities()
                        if (notNull(caps)):
                            for cap in caps:
                                key = cap.getKey().strip()
                                if (key == lanSwitchCapability):
                                    isLanSwitch = 1
                                if (key == ipForwardingCapability):
                                    isRouter = 1
                        ndMap[allNodes[i].getId()] = UNode(allNodes[i].getId(), allNodes[i].getName(), isRouter, isLanSwitch, allNodes[i].getSystemName(), allNodes[i].getSystemContact(), allNodes[i].getSystemDescription(), allNodes[i].getSystemLocation(), allNodes[i].getSystemObjectId(), allNodes[i].getLongName(), allNodes[i].getSnmpVersion(), allNodes[i].getDeviceModel(), allNodes[i].getDeviceVendor(), allNodes[i].getDeviceFamily(), allNodes[i].getDeviceDescription(), allNodes[i].getDeviceCategory(), '', '')
                    else:
                        # NOTE(review): this break abandons the REST of the current
                        # batch on the first node missing a required field — a
                        # `continue` may have been intended; confirm before changing.
                        break
    except:
        # Broad except by design: log the traceback and report a warning
        # rather than aborting discovery.
        stacktrace = traceback.format_exception(sys.exc_info()[0], sys.exc_info()[1], sys.exc_info()[2])
        errMsg = 'Exception:\n %s' % stacktrace
        logger.error(errMsg)
        api.Framework.reportWarning(errMsg)
    if found:
        logger.debug('Created a dictionary of %d Node objects' % (len(ndMap)))
    else:
        errMsg = 'Did not find any Node objects'
        logger.debug(errMsg)
        api.Framework.reportWarning(errMsg)
    return ndMap
def containers():
    """Return the distinct iib.Server containers touched by non-NOOP deltas."""
    servers = HashSet()
    for delta in deltas.deltas:
        container = delta.deployedOrPrevious.container
        if delta.operation != "NOOP" and container.type == "iib.Server":
            servers.add(container)
    return servers
def __toJavaSetInternal(val):
    '''Convert every element of a Python set or frozenset to its Java
    equivalent and collect the results in a java.util.HashSet.'''
    javaSet = HashSet()
    for element in val:
        javaSet.add(JUtil.pyValToJavaObj(element))
    return javaSet
def getScripts(self, menu):
    """Return a HashSet of script names (stringified) registered for menu;
    an empty set when the menu has no mapping."""
    from java.util import HashSet
    scriptList = HashSet()
    # `in` replaces the deprecated dict.has_key() (removed in Python 3).
    if menu in self.menuToProcMap:
        for item in self.menuToProcMap[menu]:
            scriptList.add(str(item))
    return scriptList
def getContext_0(cls, state, roles, moves):
    """Return the state contents plus one does(role, move) sentence per role."""
    context = HashSet(state.getContents())
    for idx in range(len(roles)):
        context.add(toDoes(roles.get(idx), moves.get(idx)))
    return context
def getModelFromPoints(sourcePoints, targetPoints):
    """Fit and return a RigidModel2D mapping sourcePoints onto targetPoints.
    Points are paired positionally; each is an (x, y) sequence."""
    model = RigidModel2D()
    matches = HashSet()
    for src, tgt in zip(sourcePoints, targetPoints):
        matches.add(PointMatch(Point([src[0], src[1]]), Point([tgt[0], tgt[1]])))
    model.fit(matches)
    return model
def getStateFromBase(self):
    """Refresh every base proposition from its single input and return a
    MachineState of the names of those that are true."""
    contents = HashSet()
    for prop in propNet.getBasePropositions().values():
        prop.setValue(prop.getSingleInput().getValue())
        if prop.getValue():
            # generated code exposes the proposition's name as __name__
            contents.add(prop.__name__)
    return MachineState(contents)
def containers():
    """Return distinct wls.Server / wls.Cluster containers hosting a SOA
    composite deployable in the specification's deltas."""
    matched = HashSet()
    deployableTypes = ("soa.CompositeSOADeployable", "soa.Composite")
    containerTypes = ("wls.Server", "wls.Cluster")
    for delta in specification.deltas:
        deployable = delta.deployedOrPrevious
        holder = deployable.container
        if deployable.type in deployableTypes and holder.type in containerTypes:
            matched.add(holder)
    return matched
def getUncachedGameKeys(self):
    """Return the set of game keys: every .kif filename (without the
    extension) directly under games/test."""
    keys = HashSet()
    for gameFile in File("games/test").listFiles():
        name = gameFile.__name__  # generated accessor for the file name
        if name.endsWith(".kif"):
            keys.add(name.replace(".kif", ""))
    return keys
def deployeds():
    """Return distinct soa.ExtensionLibrary deployeds from non-NOOP deltas
    whose container is a wls.SOAServer or wls.SOACluster."""
    libraries = HashSet()
    for delta in deltas.deltas:
        candidate = delta.deployedOrPrevious
        target = candidate.container
        if delta.operation == "NOOP":
            continue
        if candidate.type == "soa.ExtensionLibrary" and target.type in ("wls.SOAServer", "wls.SOACluster"):
            libraries.add(candidate)
    return libraries
def containers():
    """Return distinct wlp.Server containers flagged rebootServer that are
    touched by a non-NOOP delta."""
    toReboot = HashSet()
    for delta in deltas.deltas:
        server = delta.deployedOrPrevious.container
        if delta.operation == "NOOP":
            continue
        if server.type == "wlp.Server" and server.rebootServer is True:
            toReboot.add(server)
    return toReboot
def containers():
    """Return the distinct example.Server containers referenced by any
    non-NOOP delta."""
    targets = HashSet()
    for delta in deltas.deltas:
        holder = delta.deployedOrPrevious.container
        if delta.operation == "NOOP":
            continue
        if holder.type == "example.Server":
            targets.add(holder)
    return targets
def get_conf_mbean(conn, mbean_class, sessionId):
    """Look up the WebLogic configuration MBean for mbean_class within the
    given edit session and return a typed proxy for it.

    Raises (via the iterator) if the query matches nothing — callers get a
    NoSuchElementException rather than a silent None.
    """
    conf_name = ObjectName("com.bea:Name=" + mbean_class.NAME + sessionId + ",Type=" + mbean_class.TYPE)
    mbeans = HashSet()
    mbeans.addAll(conn.queryNames(conf_name, None))
    # Fix: `false` is not a Python constant (it only exists as a WLST
    # convenience global); the isMXBean flag must be the builtin False.
    return MBeanServerInvocationHandler.newProxyInstance(
        conn, mbeans.iterator().next(), mbean_class, False)
def glue():
    """Long-run GTC benchmark analysis: merge per-metric trials, extract
    the 'Iteration' phases, print measurement inclusives, run a
    correlation, then chart inclusive values per metric for the
    Iteration events. Relies on the PerfExplorer/TAU glue API."""
    print "doing long run test"
    # load the trial
    Utilities.setSession("perfdmf_test")
    trial1 = Utilities.getTrial("gtc_bench", "jaguar.longrun", "64.first")
    result1 = TrialMeanResult(trial1)
    trial2 = Utilities.getTrial("gtc_bench", "jaguar.longrun", "64.second")
    result2 = TrialMeanResult(trial2)
    trial3 = Utilities.getTrial("gtc_bench", "jaguar.longrun", "64.third")
    result3 = TrialMeanResult(trial3)
    # merge the trials together - they have different metrics
    merger = MergeTrialsOperation(result1)
    merger.addInput(trial2)
    # merger.addInput(trial3)
    merged = merger.processData()
    # extract the interval events
    reducer = ExtractPhasesOperation(merged.get(0), "Iteration")
    reduceds = reducer.processData()
    reduced = reduceds.get(0)
    # Dump inclusive values for the "measurement" events only.
    for event in reduced.getEvents():
        for metric in reduced.getMetrics():
            for thread in reduced.getThreads():
                if event.find("measurement") >= 0:
                    print metric, thread, reduced.getInclusive(
                        thread, event, metric)
    # do the correlation
    correlation = CorrelationOperation(reduced)
    outputs = correlation.processData()
    result = outputs.get(0)
    # type = AbstractResult.INCLUSIVE;
    # for event in result.getEvents():
    #     for metric in result.getMetrics():
    #         for thread in result.getThreads():
    #             if event.find("INCLUSIVE") >= 0:
    #                 print event, CorrelationResult.typeToString(thread), metric, ":", AbstractResult.typeToString(type), result.getDataPoint(thread, event, metric, type)
    # Collect the per-iteration events and chart them, one chart per metric.
    events = ArrayList()
    for event in merged.get(0).getEvents():
        if event.find("Iteration") >= 0:
            events.add(event)
    extractor = ExtractEventOperation(merged.get(0), events)
    extracted = extractor.processData().get(0)
    for metric in extracted.getMetrics():
        grapher = DrawGraph(extracted)
        metrics = HashSet()
        metrics.add(metric)
        grapher.set_metrics(metrics)
        grapher.setCategoryType(DrawGraph.EVENTNAME)
        grapher.setValueType(AbstractResult.INCLUSIVE)
        grapher.setLogYAxis(True)
        grapher.processData()
def addQualifier(self, type, value):
    """Record value under type in self.typeToQualifiers, lazily creating
    the HashSet bucket on first use. Falsy type or value is ignored."""
    if type and value:
        # `in` replaces the deprecated dict.has_key() (removed in Python 3).
        if type in self.typeToQualifiers:
            qualifiers = self.typeToQualifiers[type]
        else:
            qualifiers = HashSet()
            self.typeToQualifiers[type] = qualifiers
        qualifiers.add(value)
def getUniqueSrcIps(self, protocol=6):
    """Return an array of distinct source IPs from Agg_V5 for the given
    IP protocol number (default 6 = TCP)."""
    unique = HashSet()
    query = SelectSqlBuilder('Agg_V5', 'srcAddr as ip', distinct=1)
    query.where('prot=%d' % protocol)
    rows = self._sqlClient.execute(query)
    if rows:
        unique.addAll(Arrays.asList([row.ip for row in rows]))
    return unique.toArray()
def containers():
    """Return distinct wlp.Server containers from all deltas, logging each
    one selected for a feature update."""
    servers = HashSet()
    for delta in deltas.deltas:
        server = delta.deployedOrPrevious.container
        if server.type == "wlp.Server":
            logger.info("adding container: %s to the containers to update their features" % server)
            servers.add(server)
    return servers
def get(self, sentence, varRenamedSentence):
    """Return the unifications of sentence against the cached answers
    stored under varRenamedSentence, or None on a cache miss.

    Answers are de-duplicated via a HashSet before being wrapped in the
    returned ArrayList.
    """
    cacheContents = self.contents.get(varRenamedSentence)
    # Idiomatic identity test for None instead of `== None`.
    if cacheContents is None:
        return None
    results = HashSet()
    for answer in cacheContents:
        results.add(Unifier.unify(sentence, answer))
    return ArrayList(results)
def sliceSpaces(spaces):
    """Partition spaces into vertical slices: for each space not already
    covered, build a new slice and mark all its tiles as seen."""
    seen = HashSet()
    slices = []
    for index, space in enumerate(spaces):
        if seen.contains(space):
            continue
        currentSlice = MeshMaker.createNewVerticalSlice(spaces, index)
        seen.addAll(currentSlice.tiles)
        slices.append(currentSlice)
    return slices
def test_HashSet(self):
    """create HashSet in JVM (from the JavaSet)"""
    jvmSet = HashSet(self.javaSet)
    self.assertEqual(self.javaSet.size(), jvmSet.size(), "HashSet has same size")
    firstElement = list(self.testSet)[0]
    self.assertTrue(jvmSet.contains(firstElement))
def getUncachedGameKeys(self):
    """ generated source for method getUncachedGameKeys

    NOTE(review): `i` and `theArray` are never defined in this method —
    the generated translation lost their initialisation (plausibly i = 0
    and a JSON array fetched from the repository; confirm against the
    original Java). As written the loop raises NameError, which the
    broad except below swallows after printing a stack trace, so the
    method would always return an empty set.
    """
    theGameKeys = HashSet()
    try:
        while i < len(theArray):
            theGameKeys.add(theArray.getString(i))
            i += 1
    except Exception as e:
        e.printStackTrace()
    return theGameKeys
def __init__(self, root=None, outdir=None):
    """ generated source for method __init__

    Fix: the original assigned self.rootPath = root and
    self.outDir = outdir, then unconditionally overwrote both with
    placeholder values (str() and None), silently discarding the
    constructor arguments. The arguments are now honoured; the default
    call __init__() still produces the old placeholder state
    (rootPath == "" and outDir is None), so existing callers behave
    identically.
    """
    self.rootPath = root if root is not None else str()
    self.outDir = outdir  # may stay None; assigned a File later by callers
    self.fileStyles = HashMap()
    self.seenDef = HashSet()
    self.seenRef = HashSet()
def containers():
    """Return distinct JBoss containers (Domain, StandaloneServer or
    Profile) whose deployed sets restartContainer and whose delta is not
    a NOOP — i.e. the containers that must be restarted."""
    restartable = HashSet()
    jbossTypes = ["jbossdm.Domain", "jbossdm.StandaloneServer", "jbossdm.Profile"]
    for delta in deltas.deltas:
        deployed = delta.deployedOrPrevious
        holder = deployed.container
        if not deployed.hasProperty('restartContainer'):
            continue
        if deployed.restartContainer and delta.operation != "NOOP" and holder.type in jbossTypes:
            restartable.add(holder)
    return restartable
def getVarsInLiveConjuncts(cls, rule, constantSentenceForms):
    """ generated source for method getVarsInLiveConjuncts

    Collects the variables of body literals that are NOT in a constant
    sentence-form group (i.e. the "live" conjuncts of the rule).
    """
    result = HashSet()
    for literal in rule.getBody():
        if isinstance(literal, (GdlRelation, )):
            if not SentenceModelUtils.inSentenceFormGroup(literal, constantSentenceForms):
                result.addAll(GdlUtils.getVariables(literal))
        elif isinstance(literal, (GdlNot, )):
            # NOTE(review): `inner` is undefined here — the Java original
            # presumably unwrapped the negated sentence (e.g.
            # literal.getBody()). As written this branch raises NameError;
            # confirm against the generating source before relying on it.
            if not SentenceModelUtils.inSentenceFormGroup(inner, constantSentenceForms):
                result.addAll(GdlUtils.getVariables(literal))
    return result
def unique_supported_deployables():
    """Return the distinct deployables that carry a checksum attribute and
    whose delta operation is CREATE or MODIFY."""
    supported = HashSet()
    for delta in deltas.deltas:
        deployable = delta.deployedOrPrevious.deployable
        if not hasattr(deployable, "checksum"):
            continue
        if delta.operation in ("CREATE", "MODIFY"):
            supported.add(deployable)
    return supported
def ask(self, query, context, askOne):
    """Prove query against a knowledge base built from context and return
    the set of answer sentences with substitutions applied.

    askOne stops after the first answer; answers are de-duplicated by
    the HashSet that is returned.
    """
    goals = LinkedList()
    goals.add(query)
    answers = HashSet()
    alreadyAsking = HashSet()
    self.ask(goals, KnowledgeBase(context), Substitution(), ProverCache(), VariableRenamer(), askOne, answers, alreadyAsking)
    substituted = HashSet()
    for binding in answers:
        substituted.add(Substituter.substitute(query, binding))
    return substituted
def __init__(self, Framework):
    """Set up DB access, configuration and filtering state for
    process-to-process (P2P) connection discovery."""
    self.Framework = Framework
    # Probe-side database connection scoped to the P2P context.
    self.conn = self.Framework.getProbeDatabaseConnection(ProcessToProcess.CONTEXT)
    # Port-number -> service-name mapping file.
    self.knownPortsConfigFile = self.Framework.getConfigFile(CollectorsParameters.KEY_COLLECTORS_SERVERDATA_PORTNUMBERTOPORTNAME)
    # Whether same-host connections should be ignored (string param -> bool).
    self.shouldIgnoreLocal = Boolean.parseBoolean(self.Framework.getParameter('ignoreP2PLocalConnections'))
    self.knownListeningPorts = self.getKnownListeningPortsSet()
    self.requestedServices = self.getRequestedPortsSet()
    self.hostID = Framework.getDestinationAttribute('hostId')
    # Lower-cased process names to skip; may be replaced below.
    self.ignoredProcesses = HashSet()
    self.processMap = {}
    # Reads 'filterP2PProcessesByName' and may rebuild self.ignoredProcesses.
    self.getProcessesToFilter()
def webcontainers(appname): result = HashSet() print "=====================================================================================" print "In planning.py deployed.name=" for _delta in deltas.deltas: deployed = _delta.deployedOrPrevious print "In planning.py deployed.name=" + deployed.name if appname in deployed.name and (_delta.operation == "CREATE" or _delta.operation == "MODIFY"): current_container = deployed.container result.add(current_container) return result
def sort_deployeds():
    """Group rh.TomcatWARModule deployeds into HashSets keyed by
    "<appName>___<containerName>" and return the HashMap of groups."""
    grouped = HashMap()
    for delta in deltas.deltas:
        module = delta.deployedOrPrevious
        if module.type != "rh.TomcatWARModule":
            continue
        key = "%s___%s" % (module.appName, module.container.name)
        # Lazily create the bucket on first sight of the key.
        if not grouped.containsKey(key):
            grouped.put(key, HashSet())
        grouped.get(key).add(module)
    return grouped
def containers():
    """Return distinct JBoss containers flagged for restart: the deployed
    has restartContainer set truthy and the delta is not a NOOP."""
    result = HashSet()
    restartTypes = ("jbossdm.Domain", "jbossdm.StandaloneServer", "jbossdm.Profile")
    for delta in deltas.deltas:
        deployed = delta.deployedOrPrevious
        container = deployed.container
        if (deployed.hasProperty('restartContainer')
                and deployed.restartContainer
                and delta.operation != "NOOP"
                and container.type in restartTypes):
            result.add(container)
    return result
def __init__(self, name):
    """Cluster service-group state holder; everything except the name is
    populated later by the parsing/discovery code."""
    self.name = name
    self.description = None
    self.mode = None
    self.maxPrimaries = None
    self.desiredPrimaries = None
    self.isSystem = None
    self.autoStartOnNewCluster = None
    self.isFailback = None
    self.isManaged = None
    # Node membership and the group's service IPs, filled during parsing.
    self.configuredNodes = HashSet()
    self.onlineNodes = HashSet()
    self.groupIps = HashSet()
def processChunk(self, chunk):
    """Process one lexicon 'entry' chunk: parse its headword and sequence
    number, find or create the matching Lemma rows, attach the chunk to
    each, and periodically flush the Hibernate session with progress
    output. Chunks of any other type are ignored."""
    if chunk.type != "entry":
        return
    self.dao.beginTransaction()
    self.keeper.record("Beginning chunk %s" % chunk.value)
    # Strip length/diacritic marks before matching (regex built elsewhere).
    headword = self.no_lengths_expr.sub("", chunk.value)
    seq_num = 1
    match = self.headword_expr.search(headword)
    if match != None:
        # Headword carries an explicit sequence number, e.g. "word2".
        headword = match.group(1)
        seq_num = int(match.group(2))
    self.keeper.record("Split headword")
    matches = self.dao.getMatchingLemmas(headword, seq_num, self.cur_lang)
    self.keeper.record("Got matching lemmas")
    if len(matches) == 0:
        # No existing lemma for this headword: create a minimal one.
        print "CREATING:", headword
        lemma = Lemma()
        lemma.headword = headword
        lemma.sequenceNumber = seq_num
        lemma.languageID = self.cur_lang_id
        lemma.shortDefinition = None # worry about this later
        lemma.lexiconQueries = HashSet()
        lemma.authorityName = "%s%d(%s)" % \
            (headword, seq_num, self.cur_lang)
        lemma.displayName = headword
        lemma.sortableString = "%s%s" % (headword, seq_num)
        self.dao.insertLemma(lemma)
        matches.add(lemma)
    # Attach this chunk to every matching lemma and persist.
    for m in matches:
        m.getLexiconChunks().add(chunk)
        self.keeper.record("Added chunk")
        self.dao.updateLemma(m)
        self.keeper.record("Updated lemma")
    self.entry_count += 1
    if self.entry_count % 30 == 0:
        # Every 30 entries: print timing progress and flush/clear the
        # Hibernate session to bound memory use.
        new_time = time.time()
        print "[%6d] %s / %d / %s : %.5f" % \
            (self.entry_count, headword, seq_num, self.cur_lang, \
            new_time - self.last_time),
        self.last_time = new_time
        print "->", ", ".join([l.headword for l in matches])
        HibernateUtil.getSession().flush()
        HibernateUtil.getSession().clear()
        #self.dao.beginTransaction()
        self.keeper.record("FLUSHED session")
    #print self.keeper.getResults()
    # Reset the per-chunk timer for the next call.
    self.keeper.stop()
    self.keeper = Timekeeper()
def getCapabilities(self):  # -> Capabilities
    """Return the negotiation capabilities (SAOP protocol only), or None
    when building the protocol URI fails (the failure is logged)."""
    try:
        return Capabilities(HashSet([ProtocolRef(URI("SAOP"))]))
    except Exception as e:
        # Fix: the original used a bare `except:` and then referenced an
        # undefined name `e`, so any failure here raised NameError while
        # logging instead of returning None.
        getReporter().log(Level.SEVERE, "Failed to create capabilities URI", e)
        return None
def __init__(self, context, fields=None):
    """Initialise the cursor, optionally restricting it to the given
    fields (converted to a HashSet for the superclass).

    Fix: the default was the mutable `fields=[]`, a shared-state hazard;
    None is the idiomatic sentinel. Behaviour is unchanged — an omitted,
    None, or empty fields argument still takes the no-fields path.
    """
    if fields:
        Cursor.__init__(self, context, HashSet(fields))
    else:
        Cursor.__init__(self, context)
    self.id = None
    self.name = None
    self.context = context