def __init__(self, fn, maxsize=30): self.fn = fn # Synchronize map to ensure correctness in a multi-threaded application: # (I.e. concurrent threads will wait on each other to access the cache) self.m = Collections.synchronizedMap( LRUCache(maxsize, eldestFn=lambda ref: ref.clear())) self.locks = Collections.synchronizedMap(HashMap())
def append_to_errors(self, errorsList, check):
    """Store errorsList on check.errors after cleaning it.

    Removes ids the user chose to ignore and errors already reviewed,
    shuffles the remainder, filters to the favourite zone (when one is
    active and non-rectangular) and applies the configured size limits.
    """
    if errorsList[0][0] == "":
        #osmId == "", this tool doesn't give id of OSM objects
        check.errors = [Error(check, e) for e in errorsList]
    else:
        if check.ignoreIds != []:
            #remove OSM objects that the user wants to ignore
            check.errors = [Error(check, e) for e in errorsList if e[0] not in check.ignoreIds]
            #remove duplicate ids
            #check.errors = dict((e.osmId, e) for e in check.errors).values()
        else:
            #copy all errors and remove duplicate ids
            #check.errors = dict((e[0], Error(e)) for e in errorsList).values()
            check.errors = [Error(check, e) for e in errorsList]
    #Remove from the list of errors those that have already been reviewed
    #while clicking the "Next" button
    check.errors = [e for e in check.errors if e.osmId not in check.reviewedIds]
    #print "\n- errors of selected check in current zone:", [e.osmId for e in check.errors]
    #Randomize the errors so that different users don't start
    #correcting the same errors
    Collections.shuffle(check.errors)
    #Filter errors in favourite zone
    if self.app.favouriteZoneStatus and self.app.favZone.zType != "rectangle":
        #not rectangular favourite area, use jts
        from com.vividsolutions.jts.geom import Coordinate, GeometryFactory
        polygon = self.app.favZone.wktGeom
        errorsInPolygon = []
        for error in check.errors:
            (lat, lon) = error.coords
            # JTS uses (x, y) == (lon, lat) ordering.
            point = GeometryFactory().createPoint(Coordinate(lon, lat))
            if polygon.contains(point):
                if error not in errorsInPolygon:
                    errorsInPolygon.append(error)
        check.errors = errorsInPolygon
    #Apply limits from preferences
    #max number of errors: the smallest configured limit wins
    limits = []
    if self.app.maxErrorsNumber != "":
        limits.append(self.app.maxErrorsNumber)
    try:
        if self.tool.prefs["limit"] != "":
            limits.append(int(self.tool.prefs["limit"]))
    except:
        # Missing/invalid per-tool preference: ignore it.
        pass
    if limits != []:
        check.errors = check.errors[:min(limits)]
    #Reset index of current error
    check.currentErrorIndex = -1
    check.toDo = len(check.errors)
def exportAll(exportConfigFile, generalConfigFile):
    """Export an OSB/ALSB configuration to a jar file.

    Reads connection details, output paths, optional project name and
    passphrase from the property files, exports either the whole domain or
    the single named project, and optionally writes a Work Manager
    customization file alongside.
    """
    try:
        print "Loading export config from :", exportConfigFile
        exportConfigProp = loadProps(exportConfigFile,generalConfigFile)
        adminUrl = exportConfigProp.get("adminUrl")
        user = exportConfigProp.get("user")
        passwd = exportConfigProp.get("password")
        jarFileName = exportConfigProp.get("jarFileName")
        customFile = exportConfigProp.get("customizationFile")
        passphrase = exportConfigProp.get("passphrase")
        project = exportConfigProp.get("project")
        connectToServer(user, passwd, adminUrl)
        print 'connected'
        ALSBConfigurationMBean = findService("ALSBConfiguration", "com.bea.wli.sb.management.configuration.ALSBConfigurationMBean")
        print "ALSBConfiguration MBean found"
        print project
        # No project configured -> export the entire domain.
        if project == None :
            ref = Ref.DOMAIN
            collection = Collections.singleton(ref)
            if passphrase == None :
                print "Export the config"
                theBytes = ALSBConfigurationMBean.export(collection, true, None)
            else :
                print "Export and encrypt the config"
                theBytes = ALSBConfigurationMBean.export(collection, true, passphrase)
        else :
            # Export just the named project.
            ref = Ref.makeProjectRef(project);
            print "Export the project", project
            collection = Collections.singleton(ref)
            theBytes = ALSBConfigurationMBean.exportProjects(collection, passphrase)
        print 'fileName',jarFileName
        aFile = File(jarFileName)
        print 'file',aFile
        out = FileOutputStream(aFile)
        out.write(theBytes)
        out.close()
        print "ALSB Configuration file: "+ jarFileName + " has been exported"
        if customFile != None:
            print collection
            # Build a find-and-replace customization that points every Work
            # Manager env value at the production Work Manager.
            query = EnvValueQuery(None, Collections.singleton(EnvValueTypes.WORK_MANAGER), collection, false, None, false)
            customEnv = FindAndReplaceCustomization('Set the right Work Manager', query, 'Production System Work Manager')
            print 'EnvValueCustomization created'
            customList = ArrayList()
            customList.add(customEnv)
            print customList
            aFile = File(customFile)
            out = FileOutputStream(aFile)
            Customization.toXML(customList, out)
            out.close()
            # print "ALSB Dummy Customization file: "+ customFile + " has been created"
    except:
        raise
def getRandomWord(self):
    """Pop and return the next scrambled token, refilling the stack when
    it runs dry.

    Each refill re-adds every base word suffixed with the current
    numeric prefix (so tokens stay unique across refills), shuffles the
    stack, and bumps the prefix for the next round.
    """
    if self.scrambledTokens.isEmpty():
        for word in WordList.words:
            # BUG FIX: the generated Java relied on implicit string
            # conversion; in Python str + int raises TypeError, so the
            # numeric prefix must be converted explicitly.
            self.scrambledTokens.add(word + str(self.scrambledPrefix))
        Collections.shuffle(self.scrambledTokens, self.random)
        self.scrambledPrefix += 1
    return self.scrambledTokens.pop()
def setFile(self, file):
    """Load a spritefile and expose its sprite keys as a sorted list.

    Any failure while reading/sorting leaves self.items as an empty
    Python list.  The cache and position list are reset in all cases.
    """
    try:
        self.spritefile = Spritefile(file)
        keys = LinkedList(self.spritefile.sprites.keySet())
        Collections.sort(keys)
        self.items = keys
    except:
        # Unreadable or invalid file: carry on with no items.
        self.items = []
    self.cache = {}
    self.positions = []
def __init__(self, theRandom):
    """Build a scrambler seeded with theRandom.

    Starts with empty scramble/unscramble mappings, a zero numeric
    prefix, and a shuffled stack of the base word list.
    """
    super(MappingGdlScrambler, self).__init__()
    self.random = theRandom
    self.scrambleMapping = HashMap()
    self.unscrambleMapping = HashMap()
    self.scrambledPrefix = 0
    tokens = Stack()
    for baseWord in WordList.words:
        tokens.add(baseWord)
    Collections.shuffle(tokens, self.random)
    self.scrambledTokens = tokens
def run(self):
    """Plan and execute the pipeline.

    Builds one JobSpec per sink, walks each sink's source chain, then runs
    the resulting jobs with at most three in flight at once, starting a job
    only after all of its parents finish.  Returns the merged per-key
    results reported by the individual jobs.
    """
    # sanity check:
    if len(self.sources) == 0: raise Exception("No sources defined")
    if len(self.sinks) == 0: raise Exception("No sinks defined")
    # create a plan:
    specs = []
    pipemap = {}
    for sink in self.sinks:
        spec = JobSpec(self._jobid(), self.workpath)
        spec.outputpath = sink.sinkpath
        spec.outputformat = sink.outputformat
        spec.outputJson = sink.json
        spec.compressoutput = sink.compressoutput
        spec.compressiontype = sink.compressiontype
        specs.append(spec)
        if len(sink.sources) != 1: raise Exception("Sinks can only have one source: " + sink)
        self._walkPipe(spec, sink.sources[0], specs, pipemap)
    # sort out paths for jobs:
    self._configureJobs(specs)
    # run jobs:
    _log.info("Working directory is " + self.workpath)
    _log.info(str(len(specs)) + " job(s) found from " + str(len(self.pipes)) + " pipe action(s)")
    happy.dfs.delete(self.workpath)
    # Java synchronized collections: worker threads append to these.
    jobsDone = Collections.synchronizedSet(HashSet())
    jobResults = Collections.synchronizedList(ArrayList())
    jobsStarted = sets.Set()
    while jobsDone.size() < len(specs):
        # only keep 3 jobs in flight:
        for spec in specs:
            id = spec.id
            if id not in jobsStarted:
                parentIds = [parent.id for parent in spec.parents]
                # Start a job only once every parent job has completed.
                if jobsDone.containsAll(parentIds):
                    thread = threading.Thread(name="Cloud Job " + str(id), target=self._runJob,
                                              args=(spec.getJob(), id, jobsDone, jobResults))
                    thread.setDaemon(True)
                    thread.start()
                    jobsStarted.add(id)
            if len(jobsStarted) - jobsDone.size() >= 3: break
        time.sleep(1)
    # compile results: merge each job's per-key lists into one dict.
    results = {}
    for result in jobResults:
        for key, value in result.iteritems():
            results.setdefault(key, []).extend(value)
    # check for errors:
    if self.hasErrors():
        totalErrors = sum(results["happy.cloud.dataerrors"])
        _log.error("*** " + str(totalErrors) + " DataException errors were caught during this run, look in " + \
            self.workpath + "/errors to see details ***")
    return results
def exportAll():
    # Export the OSB configuration into `exportJar`: the whole domain when
    # the module-level `project` is unset, otherwise just that project.
    # Optionally writes a Work Manager customization file.
    # NOTE(review): project and customFile are compared against the *string*
    # "None" (WLST property plumbing), while passphrase is compared against
    # the None object -- confirm that asymmetry is intended.
    try:
        ALSBConfigurationMBean = findService(
            "ALSBConfiguration",
            "com.bea.wli.sb.management.configuration.ALSBConfigurationMBean")
        print "ALSBConfiguration MBean found"
        print project
        if project == "None":
            ref = Ref.DOMAIN
            collection = Collections.singleton(ref)
            if passphrase == None:
                print "Export the config"
                theBytes = ALSBConfigurationMBean.export(
                    collection, true, None)
            else:
                print "Export and encrypt the config"
                theBytes = ALSBConfigurationMBean.export(
                    collection, true, passphrase)
        else:
            # Export only the named project.
            ref = Ref.makeProjectRef(project)
            print "Export the project", project
            collection = Collections.singleton(ref)
            theBytes = ALSBConfigurationMBean.exportProjects(
                collection, passphrase)
        aFile = File(exportJar)
        out = FileOutputStream(aFile)
        out.write(theBytes)
        out.close()
        print "ALSB Configuration file: " + exportJar + " has been exported"
        if customFile != "None":
            print collection
            # Point every Work Manager env value at the production one.
            query = EnvValueQuery(
                None, Collections.singleton(EnvValueTypes.WORK_MANAGER),
                collection, false, None, false)
            customEnv = FindAndReplaceCustomization(
                'Set the right Work Manager', query,
                'Production System Work Manager')
            print 'EnvValueCustomization created'
            customList = ArrayList()
            customList.add(customEnv)
            print customList
            aFile = File(customFile)
            out = FileOutputStream(aFile)
            Customization.toXML(customList, out)
            out.close()
            print "ALSB Dummy Customization file: " + customFile + " has been created"
    except:
        raise
def main(cls, args): """ generated source for method main """ description = GameRepository.getDefaultRepository().getGame("conn4").getRules() flattener = PropNetFlattener(description) flattened = flattener.flatten() print "Flattened description for connect four contains: \n" + len(flattened) + "\n\n" strings = ArrayList() for rule in flattened: strings.add(rule.__str__()) Collections.sort(strings) for s in strings: print s
def run(config): """Display the config properties data linage""" print '\nData linage for this configuration is:' v = Vector(data_linage.keySet()) Collections.sort(v) it = v.iterator() while (it.hasNext()): element = it.next() print ' [' + element + "]" print ' Defined In : ' + data_linage.get(element) print '\n' print '\n'
def _GetCurrentASTPath(context, reverse=False):
    '''
    @return: ArrayList(SimpleNode) -- the globally accessible AST path at
    the current selection, optionally reversed in place.
    '''
    from org.python.pydev.parser.fastparser import FastParser
    selection = _CreateSelection(context)
    startLine = selection.getStartLineIndex()
    path = FastParser.parseToKnowGloballyAccessiblePath(
        context.getDocument(), startLine)
    if reverse:
        from java.util import Collections
        Collections.reverse(path)
    return path
def _GetCurrentASTPath(context, reverse=False):
    '''
    @return: ArrayList(SimpleNode) -- the globally accessible AST path at
    the current selection, optionally reversed in place.
    '''
    # FastParser comes from the scripting context rather than a direct
    # org.python.pydev import so the script stays loadable standalone.
    FastParser = context.getFastParserClass()
    selection = _CreateSelection(context)
    startLine = selection.getStartLineIndex()
    path = FastParser.parseToKnowGloballyAccessiblePath(
        context.getDocument(), startLine)
    if reverse:
        from java.util import Collections  # @UnresolvedImport
        Collections.reverse(path)
    return path
def repopulateGameList(self):
    """ generated source for method repopulateGameList """
    # Rebuild the game drop-down from the currently selected repository,
    # with keys sorted and long names shortened for display.
    theRepository = self.getSelectedGameRepository()
    theKeyList = ArrayList(theRepository.getGameKeys())
    Collections.sort(theKeyList)
    self.theGameList.removeAllItems()
    for theKey in theKeyList:
        # NOTE(review): theGame and theName are never assigned in this
        # machine-translated body -- the original Java presumably fetched
        # theGame via theRepository and derived theName from it.  As
        # written the loop raises NameError.  Also, the length test below
        # appears inverted (it truncates names SHORTER than 24 chars);
        # confirm against the Java source.
        if theGame == None:
            continue
        if theName == None:
            theName = theKey
        if 24 > len(theName):
            theName = theName.substring(0, 24) + "..."
        self.theGameList.addItem(self.NamedItem(theKey, theName))
def run(config): """Display the config as a fully resolved set of properties""" print '\nConfiguration properties are:' v = Vector(config.keySet()) Collections.sort(v) it = v.iterator() while (it.hasNext()): element = it.next(); if (String(element.lower()).endsWith('.password')): printValue = '****' else: printValue = config.get(element) print ' ' + element + "=" + printValue print '\n'
def run(config): """Display the config as a fully resolved set of properties""" print '\nConfiguration properties are:' v = Vector(config.keySet()) Collections.sort(v) it = v.iterator() while (it.hasNext()): element = it.next() if (String(element.lower()).endsWith('.password')): printValue = '****' else: printValue = config.get(element) print ' ' + element + "=" + printValue print '\n'
def removeFolderFromOSBDomain(folder):
    '''remove a folder'''
    # Deletes an OSB folder inside a dedicated change session; the session
    # is activated on success and discarded when the folder is missing or
    # any error occurs.
    try:
        domainRuntime()
        sessionName = "RemoveFolderSession_" + str(System.currentTimeMillis())
        msg("Trying to remove " + folder, _LOG_LEVEL.INFO)
        sessionManagementMBean = findService(SessionManagementMBean.NAME, SessionManagementMBean.TYPE)
        msg("SessionMBean started session", _LOG_LEVEL.INFO)
        sessionManagementMBean.createSession(sessionName)
        msg('Created session <' + sessionName + '>', _LOG_LEVEL.INFO)
        folderRef = Refs.makeParentRef(folder)
        # Session-scoped configuration MBean: changes stay pending until
        # the session is activated.
        alsbConfigurationMBean = findService(
            ALSBConfigurationMBean.NAME + "." + sessionName,
            ALSBConfigurationMBean.TYPE)
        if alsbConfigurationMBean.exists(folderRef):
            msg("#### removing OSB folder: " + folder, _LOG_LEVEL.INFO)
            alsbConfigurationMBean.delete(Collections.singleton(folderRef))
            sessionManagementMBean.activateSession(
                sessionName, "Complete service removal with customization using wlst")
        else:
            msg("OSB folder <" + folder + "> does not exist", _LOG_LEVEL.WARNING)
            discardSession(sessionManagementMBean, sessionName)
            print
    except:
        # NOTE(review): if findService itself failed, sessionManagementMBean
        # (and sessionName) are unbound here and this cleanup raises
        # NameError; also sys.exc_info()[0] is a type object being
        # concatenated to a str -- verify both against the WLST runtime.
        msg("Error whilst removing project:" + sys.exc_info()[0], _LOG_LEVEL.ERROR)
        discardSession(sessionManagementMBean, sessionName)
def makeNextAssignmentValid(self):
    """ generated source for method makeNextAssignmentValid """
    # Advances self.nextAssignment until it satisfies the plan's functional
    # constraints and distinct restrictions.
    if self.nextAssignment == None:
        return
    # Something new that can pop up with functional constants...
    i = 0
    while i < len(self.nextAssignment):
        if self.nextAssignment.get(i) == None:
            # Some function doesn't agree with the answer here
            # So what do we increment?
            incrementIndex(self.plan.getIndicesToChangeWhenNull().get(i))
            if self.nextAssignment == None:
                return
            # Restart the scan from the beginning after the increment.
            i = -1
        i += 1
    # Find all the unsatisfied distincts
    # Find the pair with the earliest var. that needs to be changed
    varsToChange = ArrayList()
    d = 0
    while d < self.plan.getDistincts().size():
        # The assignments must use the assignments implied by nextAssignment
        # NOTE(review): term1 and term2 are never assigned in this
        # machine-translated body (the Java original resolved both distinct
        # terms here); as written this raises NameError.
        if term1 == term2:
            # need to change one of these
            varsToChange.add(self.plan.getVarsToChangePerDistinct().get(d))
        d += 1
    if not varsToChange.isEmpty():
        # We want just the one, as it is a full restriction on its
        # own behalf
        # NOTE(review): varToChange is also unassigned -- the Java source
        # presumably picked the earliest entry of varsToChange.
        changeOneInNext(Collections.singleton(varToChange))
def getPreselectionIDPParams(self):
    # Scan the enabled person-authentication scripts whose names match this
    # flow's acrs and pull their "authz_req_param_provider" configuration
    # property: one value for the SAML (passport_saml) flow and one for the
    # social (OAuth/OIDC) flow.  Missing entries stay None.
    param = { "saml" : None, "social": None }
    acrs = [self.getAcrFor(True), self.getAcrFor(False)]
    custScriptService = CdiUtil.bean(CustomScriptService)
    scriptsList = custScriptService.findCustomScripts(Collections.singletonList(CustomScriptType.PERSON_AUTHENTICATION), "oxConfigurationProperty", "displayName", "gluuStatus")
    for customScript in scriptsList:
        if customScript.isEnabled() and customScript.getName() in acrs:
            for prop in customScript.getConfigurationProperties():
                if prop.getValue1() == "authz_req_param_provider" and StringHelper.isNotEmpty(prop.getValue2()):
                    # First matching property wins for this script.
                    param["saml" if customScript.getName() == "passport_saml" else "social"] = prop.getValue2()
                    break
    if param["saml"] != None:
        print "Casa. getPreselectionIDPParams. Found oxAuth cust param for SAML IDPs authz requests '%s'" % param["saml"]
    else:
        print "Casa. getPreselectionIDPParams. oxAuth cust param for SAML IDPs authz requests not found. IDPs won't be available"
    if param["social"] != None:
        print "Casa. getPreselectionIDPParams. Found oxAuth cust param for OAuth/OIDC providers' authz requests '%s'" % param["social"]
    else:
        print "Casa. getPreselectionIDPParams. oxAuth cust param for for OAuth/OIDC providers' authz requests not found. OPs won't be available"
    return param
def getPassportRedirectUrl(self, provider): # provider is assumed to exist in self.registeredProviders url = None try: facesContext = CdiUtil.bean(FacesContext) tokenEndpoint = "https://%s/passport/token" % facesContext.getExternalContext( ).getRequest().getServerName() httpService = CdiUtil.bean(HttpService) httpclient = httpService.getHttpsClient() print "Passport. getPassportRedirectUrl. Obtaining token from passport at %s" % tokenEndpoint resultResponse = httpService.executeGet( httpclient, tokenEndpoint, Collections.singletonMap("Accept", "text/json")) httpResponse = resultResponse.getHttpResponse() bytes = httpService.getResponseContent(httpResponse) response = httpService.convertEntityToString(bytes) print "Passport. getPassportRedirectUrl. Response was %s" % httpResponse.getStatusLine( ).getStatusCode() tokenObj = json.loads(response) url = "/passport/auth/%s/%s" % (provider, tokenObj["token_"]) except: print "Passport. getPassportRedirectUrl. Error building redirect URL: ", sys.exc_info( )[1] return url
def getPassportRedirectUrl(self, provider): # provider is assumed to exist in self.registeredProviders url = None try: facesContext = CdiUtil.bean(FacesContext) tokenEndpoint = "https://%s/passport/token" % facesContext.getExternalContext().getRequest().getServerName() httpService = CdiUtil.bean(HttpService) httpclient = httpService.getHttpsClient() print "Passport. getPassportRedirectUrl. Obtaining token from passport at %s" % tokenEndpoint resultResponse = httpService.executeGet(httpclient, tokenEndpoint, Collections.singletonMap("Accept", "text/json")) httpResponse = resultResponse.getHttpResponse() bytes = httpService.getResponseContent(httpResponse) response = httpService.convertEntityToString(bytes) print "Passport. getPassportRedirectUrl. Response was %s" % httpResponse.getStatusLine().getStatusCode() tokenObj = json.loads(response) url = "/passport/auth/%s/%s" % (provider, tokenObj["token_"]) except: print "Passport. getPassportRedirectUrl. Error building redirect URL: ", sys.exc_info()[1] return url
def undeployProxyFromOSBDomain(relativePath, proxyServiceName):
    '''Remove a proxyservice'''
    # Deletes one OSB proxy service inside a dedicated change session.  The
    # session is activated on success; on a missing service it is discarded,
    # and on any error it is discarded and the exception re-raised.
    try:
        domainRuntime()
        sessionName = "UndeployProxySession_" + str(System.currentTimeMillis())
        msg("Trying to remove " + proxyServiceName, _LOG_LEVEL.INFO)
        sessionManagementMBean = findService(SessionManagementMBean.NAME, SessionManagementMBean.TYPE)
        msg("SessionMBean started session", _LOG_LEVEL.INFO)
        sessionManagementMBean.createSession(sessionName)
        msg('Created session <' + sessionName + '>', _LOG_LEVEL.INFO)
        serviceRef, sessionBean = findProxyService(relativePath, proxyServiceName, sessionName)
        # Session-scoped configuration MBean: changes stay pending until
        # the session is activated.
        alsbConfigurationMBean = findService(
            ALSBConfigurationMBean.NAME + "." + sessionName,
            ALSBConfigurationMBean.TYPE)
        if alsbConfigurationMBean.exists(serviceRef):
            msg("#### removing OSB proxy service: " + proxyServiceName, _LOG_LEVEL.INFO)
            alsbConfigurationMBean.delete(Collections.singleton(serviceRef))
            sessionManagementMBean.activateSession(
                sessionName, "Complete service removal with customization using wlst")
        else:
            msg("OSB project <" + proxyServiceName + "> does not exist", _LOG_LEVEL.WARNING)
            discardSession(sessionManagementMBean, sessionName)
    except:
        # NOTE(review): if findService failed, sessionManagementMBean is
        # unbound here; and sys.exc_info()[0] is a type object concatenated
        # to a str -- verify both against the WLST runtime.
        msg("Error whilst removing project:" + sys.exc_info()[0], _LOG_LEVEL.ERROR)
        discardSession(sessionManagementMBean, sessionName)
        raise
def pyValToJavaObj(val):
    """Recursively convert a Python value into its Java counterpart.

    Scalars become boxed Java numbers/booleans, lists become ArrayLists,
    tuples become unmodifiable Lists, dicts go through pyDictToJavaMap,
    and JavaWrapperClass instances delegate to their own toJavaObj().
    Anything unrecognised is returned untouched.
    """
    valtype = type(val)
    if valtype is int:
        return Integer(val)
    if valtype is float:
        return Float(val)
    if valtype is long:
        return Long(val)
    if valtype is bool:
        return Boolean(val)
    if valtype is list:
        javaList = ArrayList()
        for element in val:
            javaList.add(pyValToJavaObj(element))
        return javaList
    if valtype is tuple:
        # Tuples are immutable, so mirror that on the Java side.
        javaList = ArrayList()
        for element in val:
            javaList.add(pyValToJavaObj(element))
        return Collections.unmodifiableList(javaList)
    if issubclass(valtype, dict):
        return pyDictToJavaMap(val)
    if issubclass(valtype, JavaWrapperClass):
        return val.toJavaObj()
    return val
def getCondensationSet(cls, rule, model, checker, sentenceNameSource):
    """ generated source for method getCondensationSet """
    # Tries to find a "condensation set": a subset of the rule's body that
    # can be split out into an intermediate rule, judged by
    # goodCondensationSetByHeuristic.  Returns the set, or None.
    # NOTE(review): this machine-translated body references collections
    # that are never initialised here (minSet, varsSupplied, varsNeeded,
    # suppliers, candidateSuppliersList, literalsToAdd), so as written it
    # raises NameError; the loop nesting below is a best-effort
    # reconstruction -- consult the original Java before trusting it.
    varsInRule = GdlUtils.getVariables(rule)
    varsInHead = GdlUtils.getVariables(rule.getHead())
    varsNotInHead = ArrayList(varsInRule)
    varsNotInHead.removeAll(varsInHead)
    for var in varsNotInHead:
        ConcurrencyUtils.checkForInterruption()
        # Gather every body literal mentioning this non-head variable.
        for literal in rule.getBody():
            if GdlUtils.getVariables(literal).contains(var):
                minSet.add(literal)
        # Relations supply variable bindings; distinct/not literals only
        # consume them.
        for literal in minSet:
            if isinstance(literal, (GdlRelation, )):
                varsSupplied.addAll(GdlUtils.getVariables(literal))
            elif isinstance(literal, (GdlDistinct, )) or isinstance(literal, (GdlNot, )):
                varsNeeded.addAll(GdlUtils.getVariables(literal))
        varsNeeded.removeAll(varsSupplied)
        if not varsNeeded.isEmpty():
            continue
        for varNeeded in varsNeeded:
            for literal in rule.getBody():
                if isinstance(literal, (GdlRelation, )):
                    if GdlUtils.getVariables(literal).contains(varNeeded):
                        suppliers.add(literal)
            candidateSuppliersList.add(suppliers)
        # Pick one supplier per needed variable, avoiding duplicates.
        for suppliers in candidateSuppliersList:
            if Collections.disjoint(suppliers, literalsToAdd):
                literalsToAdd.add(suppliers.iterator().next())
        minSet.addAll(literalsToAdd)
        if goodCondensationSetByHeuristic(minSet, rule, model, checker, sentenceNameSource):
            return minSet
    return None
def checkForJob(self, workDir):
    """Look for a purge sentinel in workDir and process it.

    Returns True when a job was found (whether or not the purge
    succeeded), False when there is nothing to do.  Repeated failures
    are counted per partition and the job is abandoned once
    self.maxFailures is reached.
    """
    jobFile = os.path.join(workDir, 'sentinel')
    if not os.path.exists(jobFile):
        return False
    # The work directory is named after the partition id.
    partId = int(os.path.basename(workDir))
    pm = ManagementContainer.getInstance().getPartitionManager()
    partition = pm.getPartition(partId)
    log('Found a purge job for partition', partId)
    if self.processJob(partition, workDir, jobFile):
        log('Purge succeeded. Clearing jobs directory (', workDir, ')to signal dispatcher that a partition is completed')
        self.clearJobDirectory(workDir)
        pm.refreshFreeSpaceInfo(Collections.singletonList(partition))
    else:
        log('Purge failed. Not clearing job directory(', workDir, '). Will retry with current batch later')
        failures = self.failuresByPartId.get(partId, 0) + 1
        if failures >= self.maxFailures:
            log('Purge: failed purge', failures, 'times for partition', partId, '. Clearing job')
            self.clearJobDirectory(workDir)
            failures = 0
        self.failuresByPartId[partId] = failures
    return True
def getPassportRedirectUrl(self, provider, loginHint): # provider is assumed to exist in self.registeredProviders url = None try: facesContext = CdiUtil.bean(FacesContext) tokenEndpoint = "https://%s/passport/token" % facesContext.getExternalContext().getRequest().getServerName() httpService = CdiUtil.bean(HttpService) httpclient = httpService.getHttpsClient() print "Passport-social. getPassportRedirectUrl. Obtaining token from passport at %s" % tokenEndpoint resultResponse = httpService.executeGet(httpclient, tokenEndpoint, Collections.singletonMap("Accept", "text/json")) httpResponse = resultResponse.getHttpResponse() bytes = httpService.getResponseContent(httpResponse) response = httpService.convertEntityToString(bytes) print "Passport-social. getPassportRedirectUrl. Response was %s" % httpResponse.getStatusLine().getStatusCode() locale = CdiUtil.bean(LanguageBean).getLocaleCode()[:2] if (locale != "en" and locale != "fr"): locale = "en" tokenObj = json.loads(response) if (loginHint != None): url = "/passport/auth/%s/%s/locale/%s/id/%s" % (provider, tokenObj["token_"], locale, Base64Util.base64urlencode(loginHint)) else: url = "/passport/auth/%s/%s/locale/%s" % (provider, tokenObj["token_"], locale ) print "Passport-social. getPassportRedirectUrl. Returning URL = %s" % url except: print "Passport-social. getPassportRedirectUrl. Error building redirect URL: ", sys.exc_info()[1] return url
def test_ops_on_same_key(self):
    """
    1. Set key
    2. Delete a key
    3. Set the same key
    4. Validate the rev_id for the key is maintained
    Ref: MB-48179
    """
    if self.durability_level in ["", Bucket.DurabilityLevel.NONE]:
        self.fail("Test supported only for sync_write scenarios")
    # crud_pattern decides which of the three phases
    # (create / delete / create) use the configured durability level
    # versus plain async writes.
    crud_pattern = self.input.param("crud_pattern", "async:sync:async")
    crud_pattern = crud_pattern.split(":")
    rev_ids = dict()
    client = SDKClient([self.cluster.master], self.cluster.buckets[0])
    # Async create of keys
    for i in range(self.num_items):
        key = self.key + str(i)
        durability = ""
        if crud_pattern[0] == "sync":
            durability = self.durability_level
        client.crud(DocLoading.Bucket.DocOps.CREATE, key, {},
                    durability=durability)
    # Sync delete of keys
    for i in range(self.num_items):
        key = self.key + str(i)
        durability = ""
        if crud_pattern[1] == "sync":
            durability = self.durability_level
        client.crud(DocLoading.Bucket.DocOps.DELETE, key,
                    durability=durability)
    # Async create of keys
    for i in range(self.num_items):
        key = self.key + str(i)
        durability = ""
        if crud_pattern[2] == "sync":
            durability = self.durability_level
        client.crud(DocLoading.Bucket.DocOps.CREATE, key, {},
                    durability=durability)
        # Read the document's revision id back via its virtual xattr.
        result = client.collection.lookupIn(
            key,
            Collections.singletonList(
                LookupInSpec.get(LookupInMacro.REV_ID).xattr()))
        rev_ids[key] = int(result.contentAs(0, String))
    client.close()
    # Rev_id validation: create + delete + create must yield rev_id 3,
    # proving the revision counter survived the delete.
    for i in range(self.num_items):
        key = self.key + str(i)
        if rev_ids[key] != 3:
            self.fail("Rev id mismatch for key '%s'. RevId: %s"
                      % (key, rev_ids[key]))
def mergeBaseRelations(self, rels):
    """ generated source for method mergeBaseRelations """
    # Groups base relations by name and merges each group into a single
    # relation whose arguments are "val" functions over the merged,
    # sorted terms.
    # NOTE(review): this machine-translated body references names that are
    # never assigned here (name, merge, mergeSet, ms2, body, toAdd), so as
    # written it raises NameError, and the loop nesting is a best-effort
    # reconstruction -- consult the original Java source before use.
    merges = HashMap()
    for rel in rels:
        if not merges.containsKey(name):
            merges.put(name, ArrayList())
        addRelToMerge(rel, merge)
    rval = HashSet()
    valConst = GdlPool.getConstant("val")
    for c in merges.keySet():
        body.add(c)
        for mergeSet in merge:
            Collections.sort(ms2, SortTerms())
            body.add(GdlPool.getFunction(valConst, ms2))
        rval.add(toAdd)
    return rval
def onInit():
    # Variables for assertions only
    global eventCounter
    eventCounter = Collections.synchronizedMap(HashMap())
    for color in ("blue", "red"):
        eventCounter.put(color, AtomicInteger(0))
    sponge.setVariable("eventCounter", eventCounter)
def test_super_methods_merged(self):
    """Regression test for bug #628315: every signature of an overloaded
    method must be visible on a wrapped class, not only the first one
    found for that name (remove(int) vs remove(Object) here)."""
    wrapped = Collections.synchronizedList(ArrayList())
    wrapped.add("a string")
    removed = wrapped.remove(0)
    self.assertEquals("a string", removed)
def onInit():
    # Per-rule "currently running" flags, shared with the test harness.
    global running
    running = Collections.synchronizedMap(LinkedHashMap())
    for ruleName in ("Rule1", "Rule2", "Rule3", "Rule4"):
        running.put(ruleName, AtomicBoolean(False))
    sponge.setVariable("testStatus", None)
def onInit():
    # Variables for assertions only
    global events
    events = Collections.synchronizedMap(HashMap())
    for policy in ("defaultClonePolicy", "deepClonePolicy", "shallowClonePolicy"):
        events.put(policy, ArrayList())
    sponge.setVariable("events", events)
def init(self, configurationAttributes):
    # Entry point called by oxAuth when the Casa script loads: discovers
    # every enabled person-authentication custom script, dynamically
    # imports the Casa sub-module for each supported acr, and initialises
    # it.  Always returns True so the parent flow keeps the script enabled.
    print "Casa. init called"
    self.authenticators = {}
    self.configFileLocation = "/etc/gluu/conf/casa.json"
    self.uid_attr = self.getLocalPrimaryKey()
    custScriptService = CdiUtil.bean(CustomScriptService)
    self.scriptsList = custScriptService.findCustomScripts(
        Collections.singletonList(CustomScriptType.PERSON_AUTHENTICATION),
        "oxConfigurationProperty", "displayName", "oxEnabled", "oxLevel")
    dynamicMethods = self.computeMethods(self.scriptsList)
    if len(dynamicMethods) > 0:
        print "Casa. init. Loading scripts for dynamic modules: %s" % dynamicMethods
        for acr in dynamicMethods:
            moduleName = self.modulePrefix + acr
            try:
                external = __import__(moduleName, globals(), locals(),
                                      ["PersonAuthentication"], -1)
                module = external.PersonAuthentication(
                    self.currentTimeMillis)
                print "Casa. init. Got dynamic module for acr %s" % acr
                configAttrs = self.getConfigurationAttributes(
                    acr, self.scriptsList)
                # U2F and SuperGluu additionally need their application id
                # copied over from this script's own configuration.
                if acr == self.ACR_U2F:
                    u2f_application_id = configurationAttributes.get(
                        "u2f_app_id").getValue2()
                    configAttrs.put(
                        "u2f_application_id",
                        SimpleCustomProperty("u2f_application_id",
                                             u2f_application_id))
                elif acr == self.ACR_SG:
                    application_id = configurationAttributes.get(
                        "supergluu_app_id").getValue2()
                    configAttrs.put(
                        "application_id",
                        SimpleCustomProperty("application_id",
                                             application_id))
                if module.init(configAttrs):
                    module.configAttrs = configAttrs
                    self.authenticators[acr] = module
                else:
                    print "Casa. init. Call to init in module '%s' returned False" % moduleName
            except:
                # A broken module must not prevent the others loading.
                print "Casa. init. Failed to load module %s" % moduleName
                print "Exception: ", sys.exc_info()[1]
    mobile_methods = configurationAttributes.get("mobile_methods")
    self.mobile_methods = [] if mobile_methods == None else StringHelper.split(
        mobile_methods.getValue2(), ",")
    print "Casa. init. Initialized successfully"
    return True
def createRequest(self, providerId, params):
    """Create a redirect URL to send an authentication request to passport.

    Fetches a short-lived token from the passport /token endpoint, then
    builds /passport/auth/<provider>/<token>[/<encrypted params>].  For
    GCCF providers the URL is additionally wrapped in the language cookie
    service redirect.  Returns None when anything fails.
    """
    url = None
    try:
        providerConfig = self.registeredProviders.get(providerId)
        if providerConfig is None:
            print("Passport. createRequest. Provider %s does not exist" %
                  providerId)
            raise PassportError()
        facesContext = CdiUtil.bean(FacesContext)
        serverName = facesContext.getExternalContext().getRequest(
        ).getServerName()
        tokenEndpoint = "https://%s/passport/token" % serverName
        httpService = CdiUtil.bean(HttpService)
        httpclient = httpService.getHttpsClient()
        resultResponse = httpService.executeGet(
            httpclient, tokenEndpoint,
            Collections.singletonMap("Accept", "text/json"))
        httpResponse = resultResponse.getHttpResponse()
        bytes = httpService.getResponseContent(httpResponse)
        response = httpService.convertEntityToString(bytes)
        if response is not None:
            token = json.loads(response)["token_"]
        else:
            raise PassportError("Failed to obtain token from Passport")
        if params is not None:
            # Extra request parameters travel encrypted inside the URL.
            jsonParams = json.dumps(params)
            encryptedParams = CdiUtil.bean(EncryptionService).encrypt(
                jsonParams)
            # Need to translate from base64 to base64url to make it URL-friendly for passport
            # See RFC4648 section 5
            encodedParams = StringUtils.replaceChars(
                encryptedParams, "/+", "_-")
            url = "/passport/auth/%s/%s/%s" % (providerId, token,
                                               encodedParams)
            if providerConfig["GCCF"]:
                # Need to set the language cookie
                langCode = {
                    "en": "eng",
                    "fr": "fra"
                }[params["ui_locales"][:2].lower()]
                url = "%s?lang=%s&return=%s" % (
                    self.passportConfig["languageCookieService"], langCode,
                    URLEncoder.encode("https://" + serverName + url,
                                      "UTF8"))
        else:
            url = "/passport/auth/%s/%s" % (providerId, token)
    except:
        print("Passport. createRequest. Error building redirect URL: ",
              sys.exc_info()[1])
    return url
def test_type_error(self):
    """A type-checked Java collection must reject wrongly-typed elements.

    Adding a String to a Collections.checkedList of Integer must raise
    TypeError on the Jython side.
    """
    from java.util import Collections, ArrayList
    backing = ArrayList()
    checked = Collections.checkedList(backing, Integer)
    checked.add(Integer(5))
    # BUG FIX: the original wrapped the whole body in try/except and merely
    # passed, so the test silently succeeded even when no TypeError was
    # raised (and would also "pass" on unrelated TypeErrors from setup).
    # Narrow the try to the offending call and fail when nothing is raised.
    try:
        checked.add(String("5"))
    except TypeError:
        pass
    else:
        self.fail("expected TypeError adding a String to a checked Integer list")
def refeedMessage(mc, clusLocId, partId, messageId, customerId, senderId, recipIds):
    """Queue one stored message for re-feed into the match store.

    Builds a RemoteMatchDescriptor for the message and registers it as a
    single-element fast-update batch at the given cluster location.
    """
    store = mc.getMessageMatchStoreManager()
    descriptor = RemoteMatchDescriptor(customerId)
    descriptor.setPartitionID(int(partId))
    descriptor.setStorageID(long(messageId))
    descriptor.setSenderId(int(senderId))
    descriptor.setRecipientIds(recipIds)
    store.addFastUpdateReference(Collections.singletonList(descriptor),
                                 int(clusLocId), 1)
def getCompleteTurnSet(self):
    """Return an unmodifiable set containing every turn index
    [0, getNumTurns()), building and caching it on first use."""
    if self.completeTurnSet == None:
        self.completeTurnSet = HashSet()
        # BUG FIX: the loop counter was never initialised (lost in the
        # Java->Python translation), so the first call raised NameError.
        i = 0
        while i < self.getNumTurns():
            self.completeTurnSet.add(i)
            i += 1
        self.completeTurnSet = Collections.unmodifiableSet(self.completeTurnSet)
    return self.completeTurnSet
def exportAll():
    # Export the OSB configuration into the module-level `exportJar`: the
    # whole domain when `project` is unset, otherwise just that project;
    # optionally also writes a Work Manager customization file.
    # NOTE(review): project and customFile are compared against the *string*
    # "None" (WLST property plumbing) while passphrase is compared against
    # the None object -- confirm that asymmetry is intended.
    try:
        ALSBConfigurationMBean = findService("ALSBConfiguration", "com.bea.wli.sb.management.configuration.ALSBConfigurationMBean")
        print "ALSBConfiguration MBean found"
        print project
        if project == "None" :
            ref = Ref.DOMAIN
            collection = Collections.singleton(ref)
            if passphrase == None :
                print "Export the config"
                theBytes = ALSBConfigurationMBean.export(collection, true, None)
            else :
                print "Export and encrypt the config"
                theBytes = ALSBConfigurationMBean.export(collection, true, passphrase)
        else :
            # Export only the named project.
            ref = Ref.makeProjectRef(project);
            print "Export the project", project
            collection = Collections.singleton(ref)
            theBytes = ALSBConfigurationMBean.exportProjects(collection, passphrase)
        aFile = File(exportJar)
        out = FileOutputStream(aFile)
        out.write(theBytes)
        out.close()
        print "ALSB Configuration file: "+ exportJar + " has been exported"
        if customFile != "None":
            print collection
            # Point every Work Manager env value at the production one.
            query = EnvValueQuery(None, Collections.singleton(EnvValueTypes.WORK_MANAGER), collection, false, None, false)
            customEnv = FindAndReplaceCustomization('Set the right Work Manager', query, 'Production System Work Manager')
            print 'EnvValueCustomization created'
            customList = ArrayList()
            customList.add(customEnv)
            print customList
            aFile = File(customFile)
            out = FileOutputStream(aFile)
            Customization.toXML(customList, out)
            out.close()
            print "ALSB Dummy Customization file: "+ customFile + " has been created"
    except:
        raise
def deleteProject(alsbConfigurationMBean, projectName): try: projectRef = Ref(Ref.PROJECT_REF, Ref.DOMAIN, projectName) if alsbConfigurationMBean.exists(projectRef): print 'INFO: Removing OSB project: ' + projectName alsbConfigurationMBean.delete(Collections.singleton(projectRef)) print 'INFO: Removed OSB project: ' + projectName else: raise ValueError('No OSB project exists with name ' + projectName) except: raise
def onInit():
    # Variables for assertions only
    global eventCounter
    eventCounter = Collections.synchronizedMap(HashMap())
    counterKeys = ("Trigger1, file1", "Trigger2, file1",
                   "Trigger1, file2", "Trigger2, file2",
                   "Trigger1, file3", "Trigger3, file3")
    for counterKey in counterKeys:
        eventCounter.put(counterKey, AtomicInteger(0))
    sponge.setVariable("eventCounter", eventCounter)
def getTupleFromSentence(cls, sentence):
    """Flatten *sentence*'s body into a list of terms.

    A bare proposition carries no body, so it yields an empty list.
    """
    if isinstance(sentence, (GdlProposition, )):
        return Collections.emptyList()
    # Crawl the sentence body, appending each term in order.
    terms = ArrayList()
    try:
        addBodyToTuple(sentence.getBody(), terms)
    except RuntimeException as err:
        # Re-raise with the offending sentence attached for debugging.
        raise RuntimeException(err.getMessage() + "\nSentence was " + sentence)
    return terms
def computeHighlighting(text):
    """Build StyleSpans for *text* from PATTERN's named capture groups."""
    # First matching group wins; "keyword" doubles as the default class.
    groupToStyle = (("keyword", "keyword"),
                    ("brace", "paren"),
                    ("comment", "comment"),
                    ("types", "types"),
                    ("cond", "conditional"))
    builder = StyleSpansBuilder()
    prevEnd = 0
    for m in re.finditer(PATTERN, text):
        styleClass = "keyword"
        for group, cls in groupToStyle:
            if m.group(group):
                styleClass = cls
                break
        # Unstyled gap before the match, then the styled match itself.
        builder.add(Collections.emptyList(), m.start() - prevEnd)
        builder.add(Collections.singleton(styleClass), m.end() - m.start())
        prevEnd = m.end()
    # Trailing unstyled text after the last match.
    builder.add(Collections.emptyList(), len(text) - prevEnd)
    return builder.create()
def getAssignmentMakingLeftIntoRight(cls, left, right):
    """Return the variable assignment that maps *left* onto *right*, or None."""
    # Sentences with different names or arities can never be unified.
    if left.__name__ != right.__name__:
        return None
    if left.arity() != right.arity():
        return None
    # Zero-arity sentences unify trivially, with no bindings at all.
    if left.arity() == 0:
        return Collections.emptyMap()
    bindings = HashMap()
    if not fillAssignmentBody(bindings, left.getBody(), right.getBody()):
        return None
    return bindings
def __init__(self): self.speed = 5; self.speedZoom = .01; self.m_systemID = HashedString("RTSCameraSystem") #private final static List<HashedString> usedComponents; #private final static List<HashedString> optionalComponents; #private final static Set<HashedString> writeToComponents; #private final static Set<HashedString> otherComponents; #private final static Set<HashedString> usedInterfaces; #private final static Set<HashedString> writeToInterfaces; components = ArrayList() components.add(RTSCameraComponent.getComponentStaticType()) self.m_usedComponents = Collections.unmodifiableList(components) components = ArrayList() components.add(CameraComponent.getComponentStaticType()) self.m_optionalComponents = Collections.unmodifiableList(components) writes = HashSet() writes.add(RTSCameraComponent.getComponentStaticType()) writes.add(CameraComponent.getComponentStaticType()) self.m_writeToComponents = Collections.unmodifiableSet(writes) self.m_otherComponents = Collections.emptySet() interfaces = HashSet() interfaces.add(SystemManager.inputInteface) self.m_usedInterfaces = Collections.unmodifiableSet(interfaces) self.m_writeToInterfaces = Collections.unmodifiableSet(HashSet(self.m_usedInterfaces))
def _add_nodes(self, curTop, dir):
    """
    Recursive implementation to fill the tree with filenames and directories

    :param curTop: current top directory node (None only at the root)
    :param dir: java.io.File of the directory to descend into
    :return: the DefaultMutableTreeNode created for `dir` (None when
        `dir` is not a directory)
    """
    curPath = dir.getPath()
    if os.path.isdir(curPath):
        nodePath = os.path.basename(curPath)
        curDir = DefaultMutableTreeNode(nodePath)
        if curTop != None:  # should only be null at root
            curTop.add(curDir)
        # Copy the directory listing into a Java Vector.
        ol = Vector()
        tmp = dir.list()
        for i in xrange(0, len(tmp)):
            ol.addElement(tmp[i])
        thisObject = None
        files = Vector()
        # Make two passes, one for Dirs and one for Files. This is #1.
        for i in xrange(0, ol.size()):
            thisObject = ol.elementAt(i)
            # At the tree root entries are used as-is; deeper down they are
            # joined onto the current path.
            if curPath == self._dir:
                newPath = thisObject
            else:
                newPath = os.path.join(curPath, thisObject)
            f = File(newPath)
            if f.isDirectory():
                # Recurse into subdirectories immediately...
                self._add_nodes(curDir, f)
            else:
                # ...and defer plain files to the sorted second pass.
                files.addElement(thisObject)
        # Pass two: for files.
        Collections.sort(files)
        for i in xrange(0, files.size()):
            f = files.elementAt(i)
            #if f.split('.')[-1] != 'html':
            curDir.add(DefaultMutableTreeNode(files.elementAt(i)))
        return curDir
def extract_content_between_block_level_nodes(self, doc):
    """
    Shows how to extract the content between a paragraph and table using
    the ExtractContent method, then duplicates that content after the table.
    """
    startPara = doc.getLastSection().getChild(NodeType.PARAGRAPH, 2, True)
    endTable = doc.getLastSection().getChild(NodeType.TABLE, 0, True)
    # Extract the content between these nodes in the document. Include these markers in the extraction.
    extractedNodes = self.extract_contents(startPara, endTable, True)
    # Reverse the list IN PLACE so that repeatedly inserting the first
    # element directly after the table reproduces the original order.
    # BUG FIX: the original called Collections.reverse(extractedNodes[::-1]),
    # which reversed a throwaway slice copy and left extractedNodes itself
    # un-reversed, so the duplicated content came out in reverse order.
    extractedNodes.reverse()
    while (len(extractedNodes) > 0):
        # Insert the last node from the reversed list
        endTable.getParentNode().insertAfter(extractedNodes[0], endTable)
        # Remove this node from the list after insertion.
        del extractedNodes[0]
    # Save the generated document to disk.
    doc.save(self.dataDir + "TestFile.DuplicatedContent Out.doc")
def set_azure_container_permission(self, container_public_access=False):
    """Set container-level public access on the blob container.

    When *container_public_access* is true, attach a signed-identifier
    access policy (valid for one minute, full "racwdl" permissions);
    otherwise clear any stored access policy.
    """
    if not container_public_access:
        self.blob_container_client.setAccessPolicy(
            PublicAccessType.CONTAINER, None)
        return
    # Policy window: starts now, expires one minute later.
    policy = BlobAccessPolicy()
    policy = policy.setStartsOn(OffsetDateTime.now())
    policy = policy.setExpiresOn(OffsetDateTime.now().plusMinutes(1))
    policy = policy.setPermissions("racwdl")
    # Identify the policy by the container's own name.
    identifier = BlobSignedIdentifier()
    identifier = identifier.setId(
        self.blob_container_client.getBlobContainerName())
    identifier = identifier.setAccessPolicy(policy)
    self.blob_container_client.setAccessPolicy(
        PublicAccessType.CONTAINER, Collections.singletonList(identifier))
def setupExportCustomer(numMessages, islandId, cloudDomain, stage1Mta):
    # Provision a cloud customer, stage `numMessages` journal messages with
    # varying valid/invalid sender/recipient combinations, then create a
    # reviewer group over all users. Exits the process when done.
    print time.asctime(), 'Start setting '
    userMsgs = [0] * len(users)  # per-user count of messages staged
    mc = ManagementContainer.getInstance()
    custId = setupCustomer(mc, str(islandId),domain=domainName, name=custname, isCloud=True,userAccounts=users, recvDate="now")
    print time.asctime(), 'customerID', custId
    propagateMtaConfig()
    custList = mc.getCustomerManager().findCustomers([SearchConstraint(ICustomerManager.PROP_NAME, SearchConstraintOperator.CONSTRAINT_EQUALS, custname)])
    cust = custList[0]
    office365Guid = cust.getGuids(CloudService.OFFICE365)[0].getGuid()
    # stage cloud messages
    # The i%5 / i%4 / i%3 / i%2 cascade varies which address slot carries
    # the invalid user so each real user accumulates a different count.
    for i in range(numMessages):
        if (i % 5) == 0:
            sendJournalMessage(office365Guid, users[0], ["invaliduser"], None, None, domainName, cloudDomain, stage1Mta)
            userMsgs[0] += 1
        elif (i % 4) == 0:
            sendJournalMessage(office365Guid, "invaliduser", [users[1]], None, None, domainName, cloudDomain, stage1Mta)
            userMsgs[1] += 1
        elif (i % 3) == 0:
            sendJournalMessage(office365Guid, "invaliduser", None, [users[2]], None, domainName, cloudDomain, stage1Mta)
            userMsgs[2] += 1
        elif (i % 2) == 0:
            sendJournalMessage(office365Guid, "invaliduser", None, None, [users[3]], domainName, cloudDomain, stage1Mta)
            userMsgs[3] += 1
        else :
            # All four users on the message: every count increments.
            sendJournalMessage(office365Guid, users[0], [users[1]], [users[2]], [users[3]], domainName, cloudDomain, stage1Mta)
            for j in range(len(users)):
                userMsgs[j] += 1
        sleep(1)  # pace message submission
    # create reviewer group
    print time.asctime(), 'creating reviewer group...'
    allUsers = InternalUserSets.getAllUsersSet(cust.getCustID())
    mc.getUserManager().saveUserSet(allUsers)
    # The first user becomes the sole reviewer for the whole user set.
    reviewer = mc.getUserManager().findUserForEmail(users[0] + '@' + domainName)
    reviewerSet = SavedUserSet(cust.getCustID())
    reviewerSet.addUsers(Collections.singletonList(SearchConstraint(IUserManager.PROP_USERID, SearchConstraintOperator.CONSTRAINT_EQUALS, reviewer.getUserID())))
    mc.getUserManager().saveUserSet(reviewerSet)
    mc.getReviewerGroupManager().createReviewerGroup(cust.getCustID(), REVIEWER_GROUP_NAME, reviewerSet, allUsers, None)
    sys.exit(0)
def getDependentServices(ambiente, configMBean): psQuery = ProxyServiceQuery() myPSSet = configMBean.getRefs(psQuery) relDepServices = []; for myPS in myPSSet: depQuery = DependencyQuery(Collections.singleton(myPS), False) refs = configMBean.getRefs(depQuery) for ref in refs: if (ref.getTypeId() == "BusinessService" or ref.getTypeId() == "ProxyService"): update = "INSERT INTO izzi_service_dependencies (SERVICE,DEPENDENTSERVICE, AMBIENTE) values ('" + myPS.getFullName() + "','" + ref.getFullName() + "','"+ambiente+"')" print update relDepServices.append(update) return relDepServices
def undeployProjects():
    # Remove every OSB project in the domain except "System", inside a
    # transient change session that is activated at the end.
    SessionMBean = None
    print 'Attempting to undeploy from ALSB Admin Server listening on ', adminUrl
    # domainRuntime()
    # Unique, timestamped session name so parallel runs do not clash.
    sessionName = "TransientWLSTSession_" + str(System.currentTimeMillis())
    sessMgmtMBean = findService(SessionManagementMBean.NAME, SessionManagementMBean.TYPE)
    sessMgmtMBean.createSession(sessionName)
    print 'Created session [', sessionName, ']'
    # The config MBean must be looked up per-session (name suffix).
    alsbConfigMBean = findService(
        ALSBConfigurationMBean.NAME + "." + sessionName,
        ALSBConfigurationMBean.TYPE)
    projectRefs = alsbConfigMBean.getRefs(Ref.DOMAIN)
    projectList = projectRefs.iterator()
    while projectList.hasNext():
        projectRef = projectList.next()
        # Domain refs include folders/resources; act only on projects.
        if projectRef.getTypeId() == Ref.PROJECT_REF:
            print "Project name : " + (projectRef.getProjectName())
            if alsbConfigMBean.exists(projectRef):
                print "#### removing OSB project: " + projectRef.getProjectName(
                )
                if projectRef.getProjectName() == "System":
                    # Never delete the built-in System project.
                    print "Omitting System project ..."
                else:
                    alsbConfigMBean.delete(Collections.singleton(projectRef))
                print
            else:
                failed = "OSB project <" + projectRef.getProjectName(
                ) + "> does not exist"
                print failed
    print
    print "Activating session ... "
    # Activation commits all deletions done inside the session.
    sessMgmtMBean.activateSession(
        sessionName, "Complete project removal with customization using wlst")
    print "Session activated."
    cleanupSession(sessMgmtMBean, sessionName)
    print "done!"
def getAllServiceURIs(ambiente, configMBean):
    """Build UPDATE statements setting each service's URI in izzi_SERVICES."""
    # `ambiente` is unused here but kept for interface parity with the
    # sibling report helpers.
    uriQuery = EnvValueQuery(None, Collections.singleton(EnvValueTypes.SERVICE_URI), None, False, None, False)
    return [
        "UPDATE izzi_SERVICES set SERVICE_URI = '" + envValue.getValue() + "' where SERVICEFULLPATH ='" + envValue.getOwner().getFullName() + "'"
        for envValue in configMBean.findEnvValues(uriQuery)
    ]
def deleteOSBProject(alsbConfigurationMBean, projectName):
    '''Delete a OSB project, logging progress; re-raises any failure.'''
    try:
        msg("Trying to remove " + projectName, _LOG_LEVEL.INFO)
        projectRef = Ref(Ref.PROJECT_REF, Ref.DOMAIN, projectName)
        if alsbConfigurationMBean.exists(projectRef):
            msg("#### removing OSB project: " + projectName, _LOG_LEVEL.INFO)
            alsbConfigurationMBean.delete(Collections.singleton(projectRef))
            msg("#### removed project: " + projectName, _LOG_LEVEL.INFO)
        else:
            # Missing project is only a warning, not an error.
            msg("OSB project <" + projectName + "> does not exist", _LOG_LEVEL.WARNING)
    except:
        # BUG FIX: sys.exc_info()[0] is the exception *class*; concatenating
        # it to a str raised a TypeError that masked the real error. Wrap in
        # str() so the original exception is logged and re-raised intact.
        msg("Error whilst removing project:" + str(sys.exc_info()[0]), _LOG_LEVEL.ERROR)
        raise
def getSourceConjunctChildren(self):
    """ generated source for method getSourceConjunctChildren """
    children = ArrayList()
    # If we are already using functions, short-circuit to cut off
    # repetition of the search space
    # NOTE(review): `functionalConjunctIndices` is referenced without
    # `self.` unlike the other index lists below -- confirm it is really a
    # module-level name and not a missing attribute reference.
    for index in functionalConjunctIndices:
        if index != -1:
            return Collections.emptyList()
    # This means we want a reference to the original list of conjuncts.
    lastSourceConjunctIndex = -1
    if not self.sourceConjunctIndices.isEmpty():
        lastSourceConjunctIndex = self.sourceConjunctIndices.get(len(self.sourceConjunctIndices) - 1)
    # Only candidates strictly after the last chosen conjunct are generated,
    # so each ordering is produced exactly once across the search.
    i = lastSourceConjunctIndex + 1
    while i < len(self.sourceConjunctCandidates):
        children.add(IterationOrderCandidate(self, i))
        i += 1
    return children
def createWalkableZones():
    # Derive walkable zones for the current map from its ledge tiles,
    # populating MeshMaker.walkableZones and walkableTilesMap.
    # Raises ValueError when the map defines no ledge tiles at all.
    MeshMaker.ledgeTiles = MeshMaker.getLedgeTiles()
    if len(MeshMaker.ledgeTiles) == 0:
        raise ValueError('No ledge tiles found on this map!')
    MeshMaker.ledgeTiles = MeshMaker.tileListSorter(
        MeshMaker.ledgeTiles)
    # Wrapped in a synchronized list -- presumably splitWalkableZones is
    # consumed from multiple threads; TODO confirm.
    ledgeTiles2 = Collections.synchronizedList(MeshMaker.ledgeTiles)
    MeshMaker.walkableTileList = MeshMaker.splitWalkableZones(
        ledgeTiles2)
    # Map each generated walk shape back to the tile run it spans
    # (shape built from the run's first and last tiles).
    MeshMaker.walkableTilesMap = {}
    for tl in MeshMaker.walkableTileList:
        s = MeshMaker.makeWalkShape(tl[0], tl[-1])
        MeshMaker.walkableZones.append(s)
        MeshMaker.walkableTilesMap[s] = tl
def init(self, configurationAttributes):
    # Bootstrap the Casa authenticator: record the casa.json config path,
    # discover enabled person-authentication scripts, and dynamically
    # import one python module per supported ACR method.
    print "Casa. init called"
    self.authenticators = {}
    self.configFileLocation = "/etc/gluu/conf/casa.json"
    self.uid_attr = self.getLocalPrimaryKey()
    custScriptService = CdiUtil.bean(CustomScriptService)
    self.scriptsList = custScriptService.findCustomScripts(Collections.singletonList(CustomScriptType.PERSON_AUTHENTICATION), "oxConfigurationProperty", "displayName", "gluuStatus", "oxLevel")
    dynamicMethods = self.computeMethods(self.scriptsList)
    if len(dynamicMethods) > 0:
        print "Casa. init. Loading scripts for dynamic modules: %s" % dynamicMethods
        for acr in dynamicMethods:
            moduleName = self.modulePrefix + acr
            try:
                # level -1 => implicit relative import (Python 2 semantics).
                external = __import__(moduleName, globals(), locals(), ["PersonAuthentication"], -1)
                module = external.PersonAuthentication(self.currentTimeMillis)
                print "Casa. init. Got dynamic module for acr %s" % acr
                configAttrs = self.getConfigurationAttributes(acr, self.scriptsList)
                # U2F and SuperGluu additionally need app-id properties
                # copied from this script's own configuration.
                if acr == self.ACR_U2F:
                    u2f_application_id = configurationAttributes.get("u2f_app_id").getValue2()
                    configAttrs.put("u2f_application_id", SimpleCustomProperty("u2f_application_id", u2f_application_id))
                elif acr == self.ACR_SG:
                    client_redirect_uri = configurationAttributes.get("supergluu_app_id").getValue2()
                    configAttrs.put("client_redirect_uri", SimpleCustomProperty("client_redirect_uri", client_redirect_uri))
                # Register the module only if its own init succeeds.
                if module.init(configAttrs):
                    module.configAttrs = configAttrs
                    self.authenticators[acr] = module
                else:
                    print "Casa. init. Call to init in module '%s' returned False" % moduleName
            except:
                # A broken module is skipped; the rest keep loading.
                print "Casa. init. Failed to load module %s" % moduleName
                print "Exception: ", sys.exc_info()[1]
    mobile_methods = configurationAttributes.get("mobile_methods")
    self.mobile_methods = [] if mobile_methods == None else StringHelper.split(mobile_methods.getValue2(), ",")
    print "Casa. init. Initialized successfully"
    return True
    # NOTE(review): these four methods continue a class whose header lies
    # outside this chunk -- presumably a Java-thread wrapper; each simply
    # delegates to the wrapped java.lang.Thread.
    def setName(self, name):
        self._thread.setName(str(name))

    def isAlive(self):
        return self._thread.isAlive()

    def isDaemon(self):
        return self._thread.isDaemon()

    def setDaemon(self, daemonic):
        self._thread.setDaemon(bool(daemonic))

# relies on the fact that this is a CHM
# Module-level registries: weak references so finished threads can be
# collected; _jthread_to_pythread maps Java threads back to wrappers.
_threads = weakref.WeakValueDictionary()
_active = _threads
_jthread_to_pythread = Collections.synchronizedMap(WeakHashMap())

class Thread(JavaThread):
    def __init__(self, group=None, target=None, name=None, args=None, kwargs=None):
        # `group` exists only for stdlib-threading API compatibility.
        assert group is None, "group argument must be None for now"
        _thread = self._create_thread()
        JavaThread.__init__(self, _thread)
        if args is None:
            args = ()
        if kwargs is None:
            kwargs = {}
        self._target = target
        self._args = args
        self._kwargs = kwargs
        if name:
            self._thread.setName(str(name))
def exportAll():
    # Export the domain config (or one project) to `exportJar`, and emit a
    # sample endpoint-URI customization file when `customFile` is set.
    # Relies on WLST script globals: project, passphrase, exportJar,
    # customFile, and the lowercase true/false booleans WLST defines.
    try:
        ALSBConfigurationMBean = findService("ALSBConfiguration", "com.bea.wli.sb.management.configuration.ALSBConfigurationMBean")
        print "ALSBConfiguration MBean found"
        print project
        if project == "None" :
            # No project name supplied: export the entire domain.
            ref = Ref.DOMAIN
            collection = Collections.singleton(ref)
            if passphrase == None :
                print "Export the config"
                theBytes = ALSBConfigurationMBean.export(collection, true, None)
            else :
                print "Export and encrypt the config"
                theBytes = ALSBConfigurationMBean.export(collection, true, passphrase)
        else :
            ref = Ref.makeProjectRef(project);
            print "Export the project", project
            collection = Collections.singleton(ref)
            theBytes = ALSBConfigurationMBean.exportProjects(collection, passphrase)
        aFile = File(exportJar)
        out = FileOutputStream(aFile)
        out.write(theBytes)
        out.close()
        print "ALSB Configuration file: "+ exportJar + " has been exported"
        if customFile != "None":
            print collection
            # see com.bea.wli.sb.util.EnvValueTypes in sb-kernel-api.jar for the values
            #EnvValueQuery evquery =
            #    new EnvValueQuery(
            #        null,        // search across all resource types
            #        Collections.singleton(EnvValueTypes.URI_ENV_VALUE_TYPE), // search only the URIs
            #        null,        // search across all projects and folders.
            #        true,        // only search across resources that are
            #                     // actually modified/imported in this session
            #        "localhost", // the string we want to replace
            #        false        // not a complete match of URI. any URI
            #                     // that has "localhost" as substring will match
            #    );
            # Query business-service URI (and URI-table) env values in the
            # exported scope for the find-and-replace customization below.
            refTypes = HashSet()
            refTypes.add(EnvValueTypes.SERVICE_URI_TABLE)
            refTypes.add(EnvValueTypes.SERVICE_URI)
            query = EnvValueQuery(Collections.singleton(Refs.BUSINESS_SERVICE_TYPE), refTypes, collection, false, "search string", false)
            # query = EnvValueQuery(None, Collections.singleton(EnvValueTypes.SERVICE_URI_TABLE), collection, false, "search string", false)
            customEnv = FindAndReplaceCustomization('new endpoint url', query, 'replace string')
            # object = QualifiedEnvValue(Refs.makeBusinessSvcRef(ref,'file'), Refs.BUSINESS_SERVICE_TYPE, "XSDvalidation/file", "aaa")
            # objects = ArrayList()
            # objects.add(object)
            # customEnv2 = EnvValueCustomization('Set the right endpoints', objects)
            print 'EnvValueCustomization created'
            customList = ArrayList()
            customList.add(customEnv)
            # customList.add(customEnv2)
            print customList
            aFile = File(customFile)
            out = FileOutputStream(aFile)
            Customization.toXML(customList, out)
            out.close()
            print "ALSB Dummy Customization file: "+ customFile + " has been created"
    except:
        raise