def getPublishersFromDistributor(self, oshv, distributor, distributorDatabaseName, sqlServerId):
    """Discover the publishers registered on this distributor.

    Runs sp_helpdistpublisher, builds a 'sqlserverpublisher' OSH per row
    (contained in a SQL Server OSH), links each publisher to the distributor
    with a 'dblink', then collects each publisher's publications while
    temporarily switched to the distribution database.
    """
    # check if i am a distributor first
    resultSet = self.connection.doCall('exec sp_helpdistpublisher')
    publisherByName = HashMap()
    serverByName = HashMap()
    while resultSet.next():
        name = resultSet.getString('name')
        publisherOsh = ObjectStateHolder('sqlserverpublisher')
        serverOsh = self.createSqlServer(name, oshv, sqlServerId)
        publisherOsh.setContainer(serverOsh)
        publisherOsh.setAttribute(Queries.DATA_NAME, name)
        publisherByName.put(name, publisherOsh)
        serverByName.put(name, serverOsh)
        oshv.add(serverOsh)
        oshv.add(publisherOsh)
        # add the dblink between the distributor and the publisher
        oshv.add(modeling.createLinkOSH('dblink', publisherOsh, distributor))
    resultSet.close()
    if publisherByName.size() == 0:
        return
    # for each publisher get the published dbs; restore the working db after
    previousDatabase = self.connection.getWorkingDatabase()
    self.connection.setWorkingDatabase(distributorDatabaseName)
    nameIterator = publisherByName.keySet().iterator()
    while nameIterator.hasNext():
        name = nameIterator.next()
        self.getPublications(name, serverByName.get(name), publisherByName.get(name), oshv, sqlServerId)
    self.connection.setWorkingDatabase(previousDatabase)
def discoverPlans(self, oshv, sqlServerId, dbs):
    """Discover SQL Server maintenance plans.

    Optionally discovers jobs and plan owners (per discoveryOptions),
    then wires plans to their jobs and databases via the sibling
    discoverPlanJobs / discoverPlanDbs helpers.
    """
    logger.debug("going to get jobs and plans")
    if self.discoveryOptions and self.discoveryOptions.discoverSqlJob:
        jobById = self.getSqlJobs(oshv, sqlServerId)
    else:
        jobById = HashMap()
    resultSet = self.connection.getTable(self.plansQuery)
    planByName = HashMap()
    while resultSet.next():
        planName = resultSet.getString('plan_name')
        planId = resultSet.getString('plan_id')
        planOsh = ObjectStateHolder('sqlservermaintenanceplan')
        planOsh.setAttribute(Queries.DATA_NAME, planName)
        planOsh.setAttribute('planId', planId)
        planOsh.setContainer(sqlServerId)
        oshv.add(planOsh)
        if self.discoveryOptions and self.discoveryOptions.discoverDbUser:
            owner = resultSet.getString('owner')
            # Some plans may not have an owner so we need to check
            if owner:
                ownerOsh = ObjectStateHolder('dbuser')
                ownerOsh.setAttribute(Queries.DATA_NAME, owner)
                ownerOsh.setContainer(sqlServerId)
                oshv.add(ownerOsh)
                oshv.add(modeling.createLinkOSH('owner', ownerOsh, planOsh))
        planByName.put(planName, planOsh)
    resultSet.close()
    logger.debug("got plans: ", planByName.keySet().toString())
    self.discoverPlanJobs(oshv, sqlServerId, planByName, jobById)
    self.discoverPlanDbs(oshv, planByName, dbs)
def getPublishersFromDistributor(self, oshv, distributor, distributorDatabaseName, sqlServerId):
    """Discover publishers on this distributor and their publications.

    One 'sqlserverpublisher' OSH is reported per sp_helpdistpublisher row,
    each linked to the distributor; publications are then gathered from the
    distribution database.
    """
    # check if i am a distributor first
    rs = self.connection.doCall('exec sp_helpdistpublisher')
    publishers = HashMap()
    sqlServers = HashMap()
    while rs.next():
        publisherName = rs.getString('name')
        sqlServer = self.createSqlServer(publisherName, oshv, sqlServerId)
        publisher = ObjectStateHolder('sqlserverpublisher')
        publisher.setContainer(sqlServer)
        publisher.setAttribute(Queries.DATA_NAME, publisherName)
        publishers.put(publisherName, publisher)
        sqlServers.put(publisherName, sqlServer)
        oshv.add(sqlServer)
        oshv.add(publisher)
        # add the dblink between the distributor and the publisher
        link = modeling.createLinkOSH('dblink', publisher, distributor)
        oshv.add(link)
    rs.close()
    if publishers.size() == 0:
        return
    # for each publisher get the published dbs
    originalDb = self.connection.getWorkingDatabase()
    self.connection.setWorkingDatabase(distributorDatabaseName)
    keyIter = publishers.keySet().iterator()
    while keyIter.hasNext():
        currentName = keyIter.next()
        currentPublisher = publishers.get(currentName)
        currentServer = sqlServers.get(currentName)
        self.getPublications(currentName, currentServer, currentPublisher, oshv, sqlServerId)
    self.connection.setWorkingDatabase(originalDb)
def findMessages(mc,custid,count): print 'finding ' + str(count) + ' messages...' amsm = mc.getActiveMailboxStoreManager() msgs = HashMap() retries = 10 # 10 minutes while msgs.size() < count and retries > 0: sleep(60) retries = retries - 1 for p in mc.getPartitionManager().listPartitions(): if p.isReadOnly(): continue print 'searching for messages to be stored in',p for msg in amsm.findMessages([SearchConstraint(IActiveMailboxStoreManager.PROP_CUST_ID, SearchConstraintOperator.CONSTRAINT_EQUALS,int(custid))],p,True): msgs.put(msg.getMessageId(), msg) print 'found',msgs.size(),'messages',msgs.keySet() if msgs.isEmpty(): print 'Failed to find any messages in DB' raise Exception('Failed to find any messages in DB') if msgs.size() < count: print 'Warning, did not find all messages expected' return msgs.values()
def mergeBaseRelations(self, rels):
    """ generated source for method mergeBaseRelations """
    # NOTE(review): auto-translated from Java and broken as written -- the
    # names 'name', 'merge', 'addRelToMerge', 'body', 'ms2' and 'toAdd' are
    # never defined in this scope, so this method cannot execute. Compare
    # against the original Java source before relying on it.
    merges = HashMap()
    for rel in rels:
        # 'name' is undefined -- presumably the relation's name; TODO confirm
        if not merges.containsKey(name):
            merges.put(name, ArrayList())
        addRelToMerge(rel, merge)
    rval = HashSet()
    valConst = GdlPool.getConstant("val")
    for c in merges.keySet():
        # 'body'/'ms2'/'toAdd' are undefined -- lost during code generation
        body.add(c)
        for mergeSet in merge:
            Collections.sort(ms2, SortTerms())
            body.add(GdlPool.getFunction(valConst, ms2))
        rval.add(toAdd)
    return rval
class IndexData:
    """Fascinator/ReDBox indexing rules object.

    Activated per stored object+payload; populates the Solr index document
    (``context["fields"]``) with storage ids, metadata, workflow state and
    security filters.
    """

    def __activate__(self, context):
        """Entry point called by the harvester with the rules context."""
        # Prepare variables
        self.index = context["fields"]
        self.object = context["object"]
        self.payload = context["payload"]
        self.params = context["params"]
        self.utils = context["pyUtils"]
        self.config = context["jsonConfig"]
        self.log = context["log"]
        self.last_modified = time.strftime("%Y-%m-%dT%H:%M:%SZ", time.gmtime())
        self.log.debug("Indexing Metadata Record '{}' '{}'", self.object.getId(), self.payload.getId())
        # Common data
        self.__newDoc()
        self.packagePid = None
        pidList = self.object.getPayloadIdList()
        for pid in pidList:
            if pid.endswith(".tfpackage"):
                self.packagePid = pid
        # Real metadata
        if self.itemType == "object":
            self.__basicData()
            self.__metadata()
            # Some of the above steps may request some
            # messages be sent, particularly workflows
            self.__messages()
        # Make sure security comes after workflows
        self.__security()

    def __newDoc(self):
        """Set the identity fields (id/item_type/harvest config) on the doc."""
        self.oid = self.object.getId()
        self.pid = self.payload.getId()
        metadataPid = self.params.getProperty("metaPid", "DC")
        self.utils.add(self.index, "storage_id", self.oid)
        if self.pid == metadataPid:
            self.itemType = "object"
        else:
            # Datastreams get a compound id and carry their payload id
            self.oid += "/" + self.pid
            self.itemType = "datastream"
            self.utils.add(self.index, "identifier", self.pid)
        self.utils.add(self.index, "id", self.oid)
        self.utils.add(self.index, "item_type", self.itemType)
        self.utils.add(self.index, "last_modified", self.last_modified)
        self.utils.add(self.index, "harvest_config", self.params.getProperty("jsonConfigOid"))
        self.utils.add(self.index, "harvest_rules", self.params.getProperty("rulesOid"))
        self.item_security = []
        self.owner = self.params.getProperty("owner", "guest")

    def __basicData(self):
        """Index repository identity, VITAL pid and persistent identifiers."""
        self.utils.add(self.index, "repository_name", self.params["repository.name"])
        self.utils.add(self.index, "repository_type", self.params["repository.type"])
        # VITAL integration
        vitalPid = self.params["vitalPid"]
        if vitalPid is not None:
            self.utils.add(self.index, "vitalPid", vitalPid)
        # Persistent Identifiers
        pidProperty = self.config.getString(None, ["curation", "pidProperty"])
        if pidProperty is None:
            self.log.error("No configuration found for persistent IDs!")
        else:
            pid = self.params[pidProperty]
            if pid is not None:
                self.utils.add(self.index, "known_ids", pid)
                self.utils.add(self.index, "pidProperty", pid)
                self.utils.add(self.index, "oai_identifier", pid)
                self.utils.add(self.index, "oai_set", "default")
        # Publication
        published = self.params["published"]
        if published is not None:
            self.utils.add(self.index, "published", "true")

    def __security(self):
        """Reconcile stored role/user access with the current workflow step."""
        # Security
        roles = self.utils.getRolesWithAccess(self.oid)
        if roles is not None:
            # For every role currently with access
            for role in roles:
                # Should show up, but during debugging we got a few
                if role != "":
                    if role in self.item_security:
                        # They still have access
                        self.utils.add(self.index, "security_filter", role)
                    else:
                        # Their access has been revoked
                        self.__revokeRoleAccess(role)
            # Now for every role that the new step allows access
            for role in self.item_security:
                if role not in roles:
                    # Grant access if new
                    self.__grantRoleAccess(role)
                    self.utils.add(self.index, "security_filter", role)
        # No existing security
        else:
            if self.item_security is None:
                # Guest access if none provided so far
                self.__grantRoleAccess("guest")
                # NOTE(review): 'role' is undefined/stale in this branch --
                # this almost certainly should index "guest"; confirm upstream
                self.utils.add(self.index, "security_filter", role)
            else:
                # Otherwise use workflow security
                for role in self.item_security:
                    # Grant access if new
                    self.__grantRoleAccess(role)
                    self.utils.add(self.index, "security_filter", role)
        users = self.utils.getUsersWithAccess(self.oid)
        if users is not None:
            # For every role currently with access
            for user in users:
                self.utils.add(self.index, "security_exception", user)
        # Ownership
        if self.owner is None:
            self.utils.add(self.index, "owner", "system")
        else:
            self.utils.add(self.index, "owner", self.owner)

    def __indexList(self, name, values):
        """Index each distinct value under the given field name."""
        # convert to set so no duplicate values
        for value in HashSet(values):
            self.utils.add(self.index, name, value)

    def __grantRoleAccess(self, newRole):
        """Persist a role grant for this object in the derby access store."""
        schema = self.utils.getAccessSchema("derby");
        schema.setRecordId(self.oid)
        schema.set("role", newRole)
        self.utils.setAccessSchema(schema, "derby")

    def __grantUserAccess(self, newUser):
        """Persist a user grant for this object in the derby access store."""
        schema = self.utils.getAccessSchema("derby");
        schema.setRecordId(self.oid)
        schema.set("user", newUser)
        self.utils.setAccessSchema(schema, "derby")

    def __revokeRoleAccess(self, oldRole):
        """Remove a role grant for this object from the derby access store."""
        schema = self.utils.getAccessSchema("derby");
        schema.setRecordId(self.oid)
        schema.set("role", oldRole)
        self.utils.removeAccessSchema(schema, "derby")

    def __revokeUserAccess(self, oldUser):
        """Remove a user grant for this object from the derby access store."""
        schema = self.utils.getAccessSchema("derby");
        schema.setRecordId(self.oid)
        schema.set("user", oldUser)
        self.utils.removeAccessSchema(schema, "derby")

    def __metadata(self):
        """Gather metadata (via __workflow) and write it into the index doc."""
        self.title = None
        self.dcType = None
        self.descriptionList = []
        self.creatorList = []
        self.creationDate = []
        self.contributorList = []
        self.approverList = []
        self.formatList = ["application/x-fascinator-package"]
        self.fulltext = []
        self.relationDict = {}
        self.customFields = {}
        self.creatorFullNameMap = HashMap()
        self.grantNumberList = []
        # arrayBucket collects repeating ("array") form fields for reporting
        self.arrayBucket = HashMap()
        self.compFields = ["dc:coverage.vivo:DateTimeInterval", "locrel:prc.foaf:Person"]
        self.compFieldsConfig = {"dc:coverage.vivo:DateTimeInterval":{"delim":" to ","start":"start","end":"end"},"locrel:prc.foaf:Person":{"delim":", ","start":"familyName","end":"givenName"} }
        self.reportingFieldPrefix = "reporting_"
        self.embargoedDate = None
        # Try our data sources, order matters
        self.__workflow()
        # Some defaults if the above failed
        if self.title is None:
            self.title = "New Dataset"
        if self.formatList == []:
            source = self.object.getPayload(self.packagePid)
            self.formatList.append(source.getContentType())
        # Index our metadata finally
        self.utils.add(self.index, "dc_title", self.title)
        if self.dcType is not None:
            self.utils.add(self.index, "dc_type", self.dcType)
        self.__indexList("dc_creator", self.creatorList)  #no dc_author in schema.xml, need to check
        self.__indexList("dc_contributor", self.contributorList)
        self.__indexList("dc_description", self.descriptionList)
        self.__indexList("dc_format", self.formatList)
        self.__indexList("dc_date", self.creationDate)
        self.__indexList("full_text", self.fulltext)
        for key in self.customFields:
            self.__indexList(key, self.customFields[key])
        for key in self.relationDict:
            self.__indexList(key, self.relationDict[key])
        if self.arrayBucket.size() > 0:
            for arrFldName in self.arrayBucket.keySet():
                if arrFldName.endswith("Person") or arrFldName.replace(self.reportingFieldPrefix, "") in self.compFields:
                    self.__indexList(arrFldName, self.arrayBucket.get(arrFldName).values())
                else:
                    self.__indexList(arrFldName, self.arrayBucket.get(arrFldName))
        if self.embargoedDate is not None:
            self.utils.add(self.index, "date_embargoed", self.embargoedDate+"T00:00:00Z")

    def __workflow(self):
        """Read/create workflow.metadata, apply step security, parse the form
        data and the .tfpackage manifest into index fields."""
        # Workflow data
        WORKFLOW_ID = "dataset"
        wfChanged = False
        workflow_security = []
        self.message_list = None
        stages = self.config.getJsonSimpleList(["stages"])
        if self.owner == "guest":
            pageTitle = "Submission Request"
            displayType = "submission-request"
            initialStep = 0
        else:
            pageTitle = "Metadata Record"
            displayType = "package-dataset"
            initialStep = 1
        try:
            wfMeta = self.__getJsonPayload("workflow.metadata")
            wfMeta.getJsonObject().put("pageTitle", pageTitle)
            # Are we indexing because of a workflow progression?
            targetStep = wfMeta.getString(None, ["targetStep"])
            if targetStep is not None and targetStep != wfMeta.getString(None, ["step"]):
                wfChanged = True
                # Step change
                wfMeta.getJsonObject().put("step", targetStep)
                wfMeta.getJsonObject().remove("targetStep")
            # This must be a re-index then
            else:
                targetStep = wfMeta.getString(None, ["step"])
            # Security change
            for stage in stages:
                if stage.getString(None, ["name"]) == targetStep:
                    wfMeta.getJsonObject().put("label", stage.getString(None, ["label"]))
                    self.item_security = stage.getStringList(["visibility"])
                    workflow_security = stage.getStringList(["security"])
                    if wfChanged == True:
                        self.message_list = stage.getStringList(["message"])
        except StorageException:
            # No workflow payload, time to create
            initialStage = stages.get(initialStep).getString(None, ["name"])
            wfChanged = True
            wfMeta = JsonSimple()
            wfMetaObj = wfMeta.getJsonObject()
            wfMetaObj.put("id", WORKFLOW_ID)
            wfMetaObj.put("step", initialStage)
            wfMetaObj.put("pageTitle", pageTitle)
            stages = self.config.getJsonSimpleList(["stages"])
            for stage in stages:
                if stage.getString(None, ["name"]) == initialStage:
                    wfMetaObj.put("label", stage.getString(None, ["label"]))
                    self.item_security = stage.getStringList(["visibility"])
                    workflow_security = stage.getStringList(["security"])
                    self.message_list = stage.getStringList(["message"])
        # Has the workflow metadata changed?
        if wfChanged == True:
            inStream = IOUtils.toInputStream(wfMeta.toString(True), "UTF-8")
            try:
                StorageUtils.createOrUpdatePayload(self.object, "workflow.metadata", inStream)
            except StorageException:
                print " ERROR updating dataset payload"
        # Form processing
        coreFields = ["title", "description", "manifest", "metaList", "relationships", "responses"]
        formData = wfMeta.getObject(["formData"])
        if formData is not None:
            formData = JsonSimple(formData)
            # Core fields
            description = formData.getStringList(["description"])
            if description:
                self.descriptionList = description
            # Non-core fields
            data = formData.getJsonObject()
            for field in data.keySet():
                if field not in coreFields:
                    self.customFields[field] = formData.getStringList([field])
        # Manifest processing (formData not present in wfMeta)
        manifest = self.__getJsonPayload(self.packagePid)
        formTitles = manifest.getStringList(["title"])
        if formTitles:
            for formTitle in formTitles:
                if self.title is None:
                    self.title = formTitle
        self.descriptionList = [manifest.getString("", ["description"])]
        #Used to make sure we have a created date
        createdDateFlag = False
        formData = manifest.getJsonObject()
        for field in formData.keySet():
            if field not in coreFields:
                value = formData.get(field)
                if value is not None and value.strip() != "":
                    self.utils.add(self.index, field, value)
                    # We want to sort by date of creation, so it
                    # needs to be indexed as a date (ie. 'date_*')
                    if field == "dc:created":
                        parsedTime = time.strptime(value, "%Y-%m-%d")
                        solrTime = time.strftime("%Y-%m-%dT%H:%M:%SZ", parsedTime)
                        self.utils.add(self.index, "date_created", solrTime)
                        self.log.debug("Set created date to :%s" % solrTime)
                        createdDateFlag = True
                    elif field == "redbox:embargo.dc:date":
                        self.embargoedDate = value
                    # try to extract some common fields for faceting
                    if field.startswith("dc:") and \
                       not (field.endswith(".dc:identifier.rdf:PlainLiteral") \
                            or field.endswith(".dc:identifier") \
                            or field.endswith(".rdf:resource")):
                        # index dublin core fields for faceting
                        basicField = field.replace("dc:", "dc_")
                        dot = field.find(".")
                        if dot > 0:
                            facetField = basicField[:dot]
                        else:
                            facetField = basicField
                        #print "Indexing DC field '%s':'%s'" % (field, facetField)
                        if facetField == "dc_title":
                            if self.title is None:
                                self.title = value
                        elif facetField == "dc_type":
                            if self.dcType is None:
                                self.dcType = value
                        elif facetField == "dc_creator":
                            if basicField.endswith("foaf_name"):
                                self.utils.add(self.index, "dc_creator", value)
                        else:
                            self.utils.add(self.index, facetField, value)
                        # index keywords for lookup
                        if field.startswith("dc:subject.vivo:keyword."):
                            self.utils.add(self.index, "keywords", value)
                    # check if this is an array field
                    fnameparts = field.split(":")
                    if fnameparts is not None and len(fnameparts) >= 3:
                        if field.startswith("bibo") or field.startswith("skos"):
                            arrParts = fnameparts[1].split(".")
                        else:
                            arrParts = fnameparts[2].split(".")
                        # we're not interested in: Relationship, Type and some redbox:origin
                        if arrParts is not None and len(arrParts) >= 2 and field.find(":Relationship.") == -1 and field.find("dc:type") == -1 and field.find("redbox:origin") == -1 and arrParts[1].isdigit():
                            # we've got an array field
                            fldPart = ":%s" % arrParts[0]
                            prefixEndIdx = field.find(fldPart) + len(fldPart)
                            suffixStartIdx = prefixEndIdx+len(arrParts[1])+1
                            arrFldName = self.reportingFieldPrefix + field[:prefixEndIdx] + field[suffixStartIdx:]
                            if field.endswith("Name"):
                                arrFldName = self.reportingFieldPrefix + field[:prefixEndIdx]
                            self.log.debug("Array Field name is:%s from: %s, with value:%s" % (arrFldName, field, value))
                            if field.endswith("Name"):
                                # Person-name parts are merged into "family, given"
                                fullFieldMap = self.arrayBucket.get(arrFldName)
                                if fullFieldMap is None:
                                    fullFieldMap = HashMap()
                                    self.arrayBucket.put(arrFldName, fullFieldMap)
                                idx = arrParts[1]
                                fullField = fullFieldMap.get(idx)
                                if (fullField is None):
                                    fullField = ""
                                if (field.endswith("givenName")):
                                    fullField = "%s, %s" % (fullField, value)
                                if (field.endswith("familyName")):
                                    fullField = "%s%s" % (value, fullField)
                                self.log.debug("fullname now is :%s" % fullField)
                                fullFieldMap.put(idx, fullField)
                            else:
                                fieldlist = self.arrayBucket.get(arrFldName)
                                if fieldlist is None:
                                    fieldlist = []
                                    self.arrayBucket.put(arrFldName, fieldlist)
                                fieldlist.append(value)
                    # composite fields (e.g. date intervals) merge start/end parts
                    for compfield in self.compFields:
                        if field.startswith(compfield):
                            arrFldName = self.reportingFieldPrefix +compfield
                            fullFieldMap = self.arrayBucket.get(arrFldName)
                            if fullFieldMap is None:
                                fullFieldMap = HashMap()
                                self.arrayBucket.put(arrFldName, fullFieldMap)
                            fullField = fullFieldMap.get("1")
                            if fullField is None:
                                fullField = ""
                            if field.endswith(self.compFieldsConfig[compfield]["end"]):
                                fullField = "%s%s%s" % (fullField, self.compFieldsConfig[compfield]["delim"] ,value)
                            if field.endswith(self.compFieldsConfig[compfield]["start"]):
                                fullField = "%s%s" % (value, fullField)
                            self.log.debug("full field now is :%s" % fullField)
                            fullFieldMap.put("1", fullField)
        self.utils.add(self.index, "display_type", displayType)
        # Make sure we have a creation date
        if not createdDateFlag:
            self.utils.add(self.index, "date_created", self.last_modified)
            self.log.debug("Forced creation date to %s because it was not explicitly set." % self.last_modified)
        # Workflow processing
        wfStep = wfMeta.getString(None, ["step"])
        self.utils.add(self.index, "workflow_id", wfMeta.getString(None, ["id"]))
        self.utils.add(self.index, "workflow_step", wfStep)
        self.utils.add(self.index, "workflow_step_label", wfMeta.getString(None, ["label"]))
        for group in workflow_security:
            self.utils.add(self.index, "workflow_security", group)
        if self.owner is not None:
            self.utils.add(self.index, "workflow_security", self.owner)
        # set OAI-PMH status to deleted
        if wfStep == "retired":
            self.utils.add(self.index, "oai_deleted", "true")

    def __messages(self):
        """Send the current oid to every target queued by the workflow step."""
        if self.message_list is not None and len(self.message_list) > 0:
            msg = JsonSimple()
            msg.getJsonObject().put("oid", self.oid)
            message = msg.toString()
            for target in self.message_list:
                self.utils.sendMessage(target, message)

    def __getJsonPayload(self, pid):
        """Open, parse and close the given payload as JSON."""
        payload = self.object.getPayload(pid)
        json = self.utils.getJsonObject(payload.open())
        payload.close()
        return json
# Java standard containers: map = HashMap() set = HashSet() for x, y in ValGen(10): map.put(x, y) set.add(y) set.add(y) print map print set # Iterating through a set: for z in set: print z, z.__class__ print map[3] # Uses Python dictionary indexing for x in map.keySet(): # keySet() is a Map method print x, map[x] # Using a Java class that you create yourself is # just as easy: jc = JavaClass() jc2 = JavaClass("Created within Jython") print jc2.getVal() jc.setVal("Using a Java class is trivial") print jc.getVal() print jc.getChars() jc.val = "Using bean properties" print jc.val #:~
def getActiveTransactions(self):
    '''Collect usage statistics for active SAP transactions.

    Picks whichever workload-aggregation RFC exists
    (SAPWL_TCODE_AGGREGATION or SAPWL_TCODE_AGGREGATION_COPY), optionally
    resolves the users per transaction code, queries every application
    server of the first site and accumulates per-transaction statistics.

    Returns a list of java.util.Properties, one per transaction.
    '''
    activeTransactions = []
    whereClauses = ArrayList()
    whereClauses.add("FUNCNAME IN ('SAPWL_TCODE_AGGREGATION','SAPWL_TCODE_AGGREGATION_COPY')")
    result = self.executeQuery("TFDIR", whereClauses, "FUNCNAME")#@@CMD_PERMISION sap protocol execution
    functionName = None
    if result.next():
        functionName = result.getString("FUNCNAME")
    if functionName == None:
        logger.warn('getActiveTransactions: active transaction function is not found')
        return activeTransactions

    # Resolve the reporting date: explicit 'from_date' property (MM/dd/yyyy)
    # or today, normalized to yyyyMMdd as the workload RFCs expect.
    day = self.__client.getProperty('from_date')
    if day == None:
        today = Date()
        sfDate = SimpleDateFormat("yyyyMMdd")
        day = sfDate.format(today)
    elif day.find('/') != -1:
        try:
            sfDate = SimpleDateFormat("MM/dd/yyyy")
            parsedDate = sfDate.parse(day)
            sfDate = SimpleDateFormat("yyyyMMdd")
            day = sfDate.format(parsedDate)
        except:
            logger.reportWarning('Failed to parse date ', day)
    logger.debug('Parsed start date:', day)
    logger.debug('Active transactions from data:', day)

    # Optionally map each transaction code to the set of users that ran it.
    mapTransactionToUsers = None
    getUsers = Boolean.parseBoolean(self.__client.getProperty("get_users"))
    if getUsers:
        mapTransactionToUsers = HashMap()
        funcParams = HashMap()
        funcParams.put('READ_START_DATE', day)
        funcParams.put('READ_START_TIME', '000000')
        funcParams.put('READ_END_DATE', day)
        funcParams.put('READ_END_TIME', '235959')
        funcParams.put('READ_ONLY_MAINRECORDS', 'X')
        logger.debug('executing func:SAPWL_STATREC_FROM_REMOTE_SYS(', str(funcParams), ')')
        fields = ArrayList()
        fields.add('TCODE')
        fields.add('ACCOUNT')
        usersResult = self.__client.executeFunction('SAPWL_STATREC_FROM_REMOTE_SYS', funcParams, 'NORMAL_RECORDS', fields)
        while usersResult.next():
            transaction = usersResult.getString('TCODE')
            if len(transaction) > 0:
                user = usersResult.getString("ACCOUNT")
                users = mapTransactionToUsers.get(transaction)
                if users == None:
                    users = HashMap()
                    mapTransactionToUsers.put(transaction, users)
                # The inner map is used as a set -- only keySet() is read
                # later. (Fixed: original stored the map inside itself via
                # users.put(user, users).)
                users.put(user, user)

    site = self.getSites().getCell(0, 0)
    servers = self.getServers(site)
    numServers = servers.getRowCount()
    transactionToStats = HashMap()
    for j in range(numServers):
        try:
            instance = servers.getCell(j, 0)
            logger.debug('getActiveTransactions:executing function[' + functionName + '] for instance [' + instance + ']')
            if functionName == 'SAPWL_TCODE_AGGREGATION_COPY':
                # The COPY cursor is positional: each next() advances to the
                # next single-column value in a fixed order
                # (tcode, steps, response, cpu, db, gui, roundtrips, text).
                records = self.callSapwlTcodeAggregationCopy(instance, day)
                while records.next():
                    transaction = (str(records.getString(0))).strip()
                    mapUsers = None
                    if mapTransactionToUsers != None:
                        mapUsers = mapTransactionToUsers.get(transaction)
                    if (transaction != None) and (len(transaction) > 0):
                        stats = transactionToStats.get(transaction)
                        if stats == None:
                            stats = TransactionStatistics(transaction)
                            transactionToStats.put(transaction, stats)
                        if mapUsers != None:
                            stats.users = ArrayList(mapUsers.keySet())
                        if records.next():
                            stats.steps = stats.steps + int(float(records.getString(0)))
                        if records.next():
                            stats.responseTime = stats.responseTime + int(float(records.getString(0)))
                        if records.next():
                            stats.cpuTime = stats.cpuTime + int(float(records.getString(0)))
                        if records.next():
                            stats.dbTime = stats.dbTime + int(float(records.getString(0)))
                        if records.next():
                            stats.guiTime = stats.guiTime + int(float(records.getString(0)))
                        if records.next():
                            stats.roundTrips = stats.roundTrips + int(float(records.getString(0)))
                        if records.next():
                            stats.text = (str(records.getString(0))).strip()
            else:
                fields = ArrayList()
                fields.add('ENTRY_ID')
                fields.add('COUNT')
                fields.add('RESPTI')
                fields.add('CPUTI')
                fields.add('DBTIME')
                fields.add('GUITIME')
                fields.add('GUICNT')
                fields.add('TEXT')
                records = self.getApplicationStatistics(functionName, instance, day, fields)
                while records.next():
                    entryID = records.getString("ENTRY_ID")
                    transaction = self.getTransactionFromEntryID(entryID)
                    mapUsers = None
                    if mapTransactionToUsers != None:
                        mapUsers = mapTransactionToUsers.get(transaction)
                    if (transaction != None) and (len(transaction) > 0):
                        stats = transactionToStats.get(transaction)
                        if stats == None:
                            stats = TransactionStatistics(transaction)
                            transactionToStats.put(transaction, stats)
                        if mapUsers != None:
                            stats.users = ArrayList(mapUsers.keySet())
                        count = records.getString("COUNT")
                        stats.steps = stats.steps + int(count)
                        stats.responseTime = stats.responseTime + int(records.getString("RESPTI"))
                        stats.cpuTime = stats.cpuTime + int(records.getString("CPUTI"))
                        stats.dbTime = stats.dbTime + int(records.getString("DBTIME"))
                        stats.guiTime = stats.guiTime + int(records.getString("GUITIME"))
                        stats.roundTrips = stats.roundTrips + int(records.getString("GUICNT"))
                        stats.text = records.getString("TEXT")
        except:
            msg = sys.exc_info()[1]
            strmsg = '%s' % msg
            if strmsg.find('NO_DATA_FOUND') != -1:
                logger.debug(strmsg)
                logger.reportWarning('No data found in the given time range')
            else:
                logger.debugException('Unexpected error getting transactions for function:' + str(functionName))
                logger.reportWarning('Unexpected error getting transactions for function:' + str(functionName) + ':' + strmsg)

    transactions = ArrayList(transactionToStats.keySet())
    logger.debug("getActiveTransactions: Found [" + str(transactions.size()) + "] active transactions")
    if logger.isDebugEnabled():
        logger.debug("getActiveTransactions: transactions = " + str(transactions))
    transactionsInfo = self.getTransactionsInfo(transactions)
    for stats in transactionToStats.values():
        prop = Properties()
        prop.setProperty('data_name', str(stats.transaction))
        prop.setProperty('dialog_steps', str(stats.steps))
        prop.setProperty('total_response_time', str(stats.responseTime))
        # NOTE(review): average_response_time is filled from
        # getAverageCPUTime() in the original -- looks like a copy/paste
        # bug, but kept until TransactionStatistics' API is confirmed.
        prop.setProperty('average_response_time', str(stats.getAverageCPUTime()))
        prop.setProperty('total_cpu_time', str(stats.cpuTime))
        prop.setProperty('average_cpu_time', str(stats.getAverageCPUTime()))
        prop.setProperty('round_trips', str(stats.roundTrips))
        prop.setProperty('total_db_time', str(stats.dbTime))
        prop.setProperty('average_db_time', str(stats.getAverageDBTime()))
        prop.setProperty('total_gui_time', str(stats.guiTime))
        prop.setProperty('average_gui_time', str(stats.getAverageGUITime()))
        prop.setProperty('text', stats.text)
        # NOTE(review): property name vs. value mismatch kept from original
        # (user count stored under 'saptransaction_averagedbtime') -- confirm
        # against the consuming adapter before renaming.
        prop.setProperty('saptransaction_averagedbtime', str(stats.users.size()))
        info = transactionsInfo.get(stats.transaction)
        if info != None:
            prop.setProperty('devclass', info.devclass)
            prop.setProperty('program', info.program)
            prop.setProperty('screen', info.screen)
            # Fixed: original wrote prop.setProperty('', info.screen) --
            # an empty property name with a duplicated value; the else
            # branch shows 'version' was intended.
            prop.setProperty('version', info.version)
        else:
            prop.setProperty('devclass', "")
            prop.setProperty('program', "")
            prop.setProperty('screen', "")
            prop.setProperty('version', "")
        activeTransactions.append(prop)
    return activeTransactions
# Demonstrates driving Java standard containers from Jython.
java_map = HashMap()
java_set = HashSet()
for key, value in ValGen(10):
    java_map.put(key, value)
    # adding the same element twice shows that sets ignore duplicates
    java_set.add(value)
    java_set.add(value)
print(java_map)
print(java_set)

# A Java set is directly iterable:
for element in java_set:
    print(element, element.__class__)

print(java_map[3])  # Uses Python dictionary indexing
for key in java_map.keySet():  # keySet() is a Map method
    print(key, java_map[key])

# Using a Java class that you create yourself is
# just as easy:
jc = JavaClass()
jc2 = JavaClass("Created within Jython")
print(jc2.getVal())
jc.setVal("Using a Java class is trivial")
print(jc.getVal())
print(jc.getChars())
jc.val = "Using bean properties"
print(jc.val)
class PlayerPresenceManager(Subject):
    """ generated source for class PlayerPresenceManager """
    # NOTE(review): this class is an incomplete machine translation from
    # Java. Several methods reference names that were lost in translation
    # (presence, keys, old_status/new_status, playerListJSON, file_, br,
    # theHostports, i) and loadPlayersJSON contains Java-style
    # assignment-in-condition syntax that is not valid Python. Treat as
    # non-runnable scaffolding until reconciled with the Java original.
    monitoredPlayers = Map()

    class PlayerPresenceChanged(Event):
        """ generated source for class PlayerPresenceChanged """

    class PlayerPresenceAdded(Event):
        """ generated source for class PlayerPresenceAdded """

    class PlayerPresenceRemoved(Event):
        """ generated source for class PlayerPresenceRemoved """

    @classmethod
    def isDifferent(cls, a, b):
        """ generated source for method isDifferent """
        # NOTE(review): translation artifact -- returns a tuple
        # (not Objects == a, b), not an equality test as the Java
        # Objects.equals(a, b) original would.
        return not Objects == a, b

    INFO_PING_PERIOD_IN_SECONDS = 1

    class PresenceMonitor(Thread):
        """ generated source for class PresenceMonitor """

        def run(self):
            """ generated source for method run """
            # Polls each monitored player's status once per period and
            # notifies observers when a status or name change is seen.
            while True:
                try:
                    Thread.sleep(self.INFO_PING_PERIOD_IN_SECONDS)
                except InterruptedException as e:
                    e.printStackTrace()
                # 'keys'/'presence'/old_*/new_* are undefined here -- lost
                # loop body from the Java source; TODO reconstruct.
                for key in keys:
                    if presence == None:
                        continue
                    if presence.getStatusAge() > self.INFO_PING_PERIOD_IN_SECONDS * 1000:
                        presence.updateInfo()
                        if self.isDifferent(old_status, new_status):
                            notifyObservers(self.PlayerPresenceChanged())
                        elif self.isDifferent(old_name, new_name):
                            notifyObservers(self.PlayerPresenceChanged())

    def __init__(self):
        """ generated source for method __init__ """
        super(PlayerPresenceManager, self).__init__()
        self.monitoredPlayers = HashMap()
        loadPlayersJSON()
        if len(self.monitoredPlayers) == 0:
            try:
                # When starting from a blank slate, add some initial players to the
                # monitoring list just so that it's clear how it works.
                addPlayer("127.0.0.1:9147")
                addPlayer("127.0.0.1:9148")
            except InvalidHostportException as e:
                pass
        self.PresenceMonitor().start()

    @SuppressWarnings("serial")
    class InvalidHostportException(Exception):
        """ generated source for class InvalidHostportException """

    def addPlayerSilently(self, hostport):
        """ generated source for method addPlayerSilently """
        # Registers a player without notifying observers or saving.
        try:
            if not self.monitoredPlayers.containsKey(hostport):
                # 'presence' is undefined -- the construction of the
                # PlayerPresence object was lost in translation.
                self.monitoredPlayers.put(hostport, presence)
                return presence
            else:
                return self.monitoredPlayers.get(hostport)
        except ArrayIndexOutOfBoundsException as e:
            raise self.InvalidHostportException()
        except NumberFormatException as e:
            raise self.InvalidHostportException()

    def addPlayer(self, hostport):
        """ generated source for method addPlayer """
        # Register, notify observers, and persist the player list.
        presence = self.addPlayerSilently(hostport)
        notifyObservers(self.PlayerPresenceAdded())
        savePlayersJSON()
        return presence

    def removePlayer(self, hostport):
        """ generated source for method removePlayer """
        self.monitoredPlayers.remove(hostport)
        notifyObservers(self.PlayerPresenceRemoved())
        savePlayersJSON()

    def getPresence(self, hostport):
        """ generated source for method getPresence """
        return self.monitoredPlayers.get(hostport)

    def getSortedPlayerNames(self):
        """ generated source for method getSortedPlayerNames """
        return TreeSet(self.monitoredPlayers.keySet())

    observers = HashSet()

    def addObserver(self, observer):
        """ generated source for method addObserver """
        self.observers.add(observer)

    def notifyObservers(self, event):
        """ generated source for method notifyObservers """
        # NOTE(review): iterates the bare name 'observers' (class attribute)
        # rather than self.observers -- works only by accident of scoping in
        # the Java original; confirm intent.
        for observer in observers:
            observer.observe(event)

    playerListFilename = ".ggpserver-playerlist.json"

    def savePlayersJSON(self):
        """ generated source for method savePlayersJSON """
        # 'playerListJSON', 'file_' and 'bw' are undefined -- the JSON/file
        # setup statements were lost in translation.
        try:
            playerListJSON.put("hostports", self.monitoredPlayers.keySet())
            if not file_.exists():
                file_.createNewFile()
            bw.write(playerListJSON.__str__())
            bw.close()
        except IOException as ie:
            ie.printStackTrace()
        except JSONException as e:
            e.printStackTrace()

    def loadPlayersJSON(self):
        """ generated source for method loadPlayersJSON """
        # 'file_', 'br', 'pdata', 'playerListJSON', 'theHostports' and 'i'
        # are undefined, and the read loop below keeps Java's
        # assignment-in-condition form, which is not valid Python syntax.
        try:
            if not file_.exists():
                return
            try:
                while (line = br.readLine()) != None:
                    pdata.append(line)
            finally:
                br.close()
            if playerListJSON.has("hostports"):
                while i < len(theHostports):
                    try:
                        self.addPlayerSilently(theHostports.get(i).__str__())
                    except InvalidHostportException as e:
                        e.printStackTrace()
                    i += 1
        except IOException as ie:
            ie.printStackTrace()
        except JSONException as e:
            e.printStackTrace()
def getTransactionsInfo(self, transactions):
    """Collect metadata for the given SAP transaction codes.

    Reads TSTC for the program and screen behind each transaction, then
    TADIR for program versions ('PROG' rows) and transaction development
    classes ('TRAN' rows).

    :param transactions: list of transaction codes (TCODE values)
    :return: HashMap of transaction code -> TransactionInfo
             (empty when `transactions` is empty or None)
    """
    mapTransactionToInfo = HashMap()
    mapProgramToTransaction = HashMap()
    if (transactions == None) or (len(transactions) == 0):
        logger.info("getTransactionsInfo: transactions list is empty")
        return mapTransactionToInfo
    transactionsRS = self.__client.executeQuery('TSTC', '', 'TCODE', transactions, 'TCODE,PGMNA,DYPNO')#@@CMD_PERMISION sap protocol execution
    while transactionsRS.next():
        transaction = transactionsRS.getString("TCODE")
        program = transactionsRS.getString("PGMNA")
        screen = transactionsRS.getString("DYPNO")
        if logger.isDebugEnabled():
            logger.debug("-------------------------------------------------------")
            logger.debug("getTransactionsInfo: transaction = " + transaction)
            logger.debug("getTransactionsInfo: program = " + program)
            logger.debug("getTransactionsInfo: screen = " + screen)
            logger.debug("-------------------------------------------------------")
        if (program == None) or (len(program) == 0):
            # Transactions without a backing program are still reported.
            program = "N/A"
            logger.info("getTransactionsInfo: program for transaction [" + str(transaction) + "] is no available - setting to N/A.")
        info = TransactionInfo(transaction, program, screen)
        mapTransactionToInfo.put(transaction, info)
        # Index transactions by program so TADIR 'PROG' rows can be joined back.
        transForProgram = mapProgramToTransaction.get(program)
        if transForProgram == None:
            transForProgram = ArrayList()
            mapProgramToTransaction.put(program, transForProgram)
        transForProgram.add(transaction)
    if logger.isDebugEnabled():
        logger.debug("getTransactionsInfo: mapProgramToTransaction = " + str(mapProgramToTransaction))
    if len(mapProgramToTransaction) == 0:
        logger.info("getTransactionsInfo: failed to get programs for transactions " + str(transactions))
        # BUGFIX: previously returned mapProgramToTransaction here; callers
        # expect the transaction->info map (both are empty at this point,
        # but the returned type/content contract was wrong).
        return mapTransactionToInfo
    # Query TADIR once for both program objects and transaction objects.
    objNames = ArrayList(mapProgramToTransaction.keySet())
    objNames.addAll(mapTransactionToInfo.keySet())
    programsRS = self.__client.executeQuery('TADIR', "(OBJECT = 'PROG' OR OBJECT = 'TRAN') AND ", 'OBJ_NAME', objNames, 'OBJECT,OBJ_NAME,VERSID,DEVCLASS')#@@CMD_PERMISION sap protocol execution
    while programsRS.next():
        objectType = programsRS.getString("OBJECT")
        if objectType == "PROG":
            program = programsRS.getString("OBJ_NAME")
            version = programsRS.getString("VERSID")
            transForProgram = mapProgramToTransaction.get(program)
            if transForProgram != None:
                for ti in transForProgram:
                    info = mapTransactionToInfo.get(ti)
                    if info == None:
                        # BUGFIX: log the transaction being looked up (ti),
                        # not the stale `transaction` left over from the
                        # first result-set loop.
                        logger.info("program: Failed to find info for transaction [" + str(ti) + "]")
                    else:
                        info.version = version
            else:
                logger.info("getTransactionsInfo: failed getting transactions for program [" + str(program) + "]")
        else:
            # OBJECT == 'TRAN': development class belongs to the transaction.
            devclass = programsRS.getString("DEVCLASS")
            transaction = programsRS.getString("OBJ_NAME")
            info = mapTransactionToInfo.get(transaction)
            if info == None:
                logger.info("transaction: Failed to find info for transaction [" + str(transaction) + "]")
                info = TransactionInfo(transaction, "N/A", "")
                mapTransactionToInfo.put(transaction, info)
            info.devclass = devclass
    if logger.isDebugEnabled():
        logger.debug("--------------------------------------------------")
        logger.debug("getTransactionsInfo: returning transaction info " + str(mapTransactionToInfo))
        logger.debug("--------------------------------------------------")
    return mapTransactionToInfo
def getInstallPathUnix(webserver_name, shellUtils): path = '/opt/sadmin/sweapp/bin' if string.find(webserver_name, 'Netscape-Enterprise') >= 0: data = shellUtils.execCmd('ps -ef | grep ns-http')#@@CMD_PERMISION siebel protocol execution rows = string.split(data, '\n') # can be more than one process for each server - keep only one path paths = HashMap() for row in rows: pattern = Pattern('\s*-d\s*([^\s]*)') match = pattern.matcher(row) if match.find() == 1: configPath = match.group(1) paths.put(configPath,configPath) it = paths.keySet().iterator() while it.hasNext(): path = it.next() confFile = None confFilePath = path + '/obj.conf' try: confFile = shellUtils.safecat(confFilePath) if not confFile: raise ValueError except: logger.debug("Failed reading config file '%s'" % confFilePath) else: pattern = Pattern('\s*dir\s*=\s*"([^\s]*sweapp[^\s/]*)') match = pattern.matcher(confFile) if match.find() == 1: path = match.group(1) if path != '': path = path + '/bin' break else: data = shellUtils.execCmd('ps -ef | grep httpd')#@@CMD_PERMISION siebel protocol execution paths = HashMap() pattern = Pattern('\s*-d\s*([^\s]*)') match = pattern.matcher(data) while match.find() == 1: configPath = match.group(1) paths.put(configPath,configPath) logger.debug(paths) it = paths.keySet().iterator() while it.hasNext(): path = it.next() configFilePath = path + '/conf/httpd.conf' confFile = None try: confFile = shellUtils.safecat(configFilePath) if not confFile: raise ValueError except: logger.debug("Failed reading config file '%s'" % configFilePath) else: pattern = Pattern('\sSiebelHome\s*([^\s]*)') match = pattern.matcher(confFile) if match.find() == 1: path = match.group(1) if path != '': path = path + '/bin' break return path
class TtlCache(Map, K, V):
    """ generated source for class TtlCache

    A map whose entries expire after `ttl` calls to prune() without being
    read: get() refreshes an entry's counter, prune() ages every entry and
    evicts those whose counter already reached zero.
    NOTE(review): `Map`, `K`, `V`, `@synchronized` and `@SuppressWarnings`
    come from the Java->Python generator and are assumed to be supplied by
    the surrounding runtime shims -- confirm.
    """

    class Entry(object):
        """ generated source for class Entry -- a value plus remaining TTL. """
        ttl = int()
        value = V()

        def __init__(self, value, ttl):
            """ generated source for method __init__ """
            self.value = value
            self.ttl = ttl

        @SuppressWarnings("unchecked")
        def equals(self, o):
            """ generated source for method equals """
            if isinstance(o, (self.Entry, )):
                return (o).value == self.value
            return False

    contents = Map()
    ttl = int()

    def __init__(self, ttl):
        """ generated source for method __init__ """
        super(TtlCache, self).__init__()
        self.contents = HashMap()
        self.ttl = ttl

    @synchronized
    def containsKey(self, key):
        """ generated source for method containsKey """
        return self.contents.containsKey(key)

    @synchronized
    def get(self, key):
        """ generated source for method get """
        entry = self.contents.get(key)
        if entry == None:
            return None
        # Reset the TTL when a value is accessed directly.
        entry.ttl = self.ttl
        return entry.value

    @synchronized
    def prune(self):
        """ generated source for method prune

        Evict entries whose TTL ran out, then age the survivors.
        """
        toPrune = ArrayList()
        for key in self.contents.keySet():
            # BUGFIX: the generated code read `entry` without ever fetching
            # it (and iterated a bare `contents`); look the entry up per key.
            entry = self.contents.get(key)
            if entry.ttl == 0:
                toPrune.add(key)
            entry.ttl -= 1
        for key in toPrune:
            self.contents.remove(key)

    @synchronized
    def put(self, key, value):
        """ generated source for method put -- returns the previous value. """
        x = self.contents.put(key, self.Entry(value, self.ttl))
        if x == None:
            return None
        return x.value

    @synchronized
    def size(self):
        """ generated source for method size """
        return len(self.contents)

    @synchronized
    def clear(self):
        """ generated source for method clear """
        self.contents.clear()

    @synchronized
    def containsValue(self, value):
        """ generated source for method containsValue """
        return self.contents.containsValue(value)

    @synchronized
    def isEmpty(self):
        """ generated source for method isEmpty """
        return self.contents.isEmpty()

    @synchronized
    def keySet(self):
        """ generated source for method keySet """
        return self.contents.keySet()

    @synchronized
    def putAll(self, m):
        """ generated source for method putAll """
        for anEntry in m.entrySet():
            self.put(anEntry.getKey(), anEntry.getValue())

    @synchronized
    def remove(self, key):
        """ generated source for method remove -- returns the removed value.

        BUGFIX: guard against a missing key; the generated code dereferenced
        the result of contents.remove() unconditionally.
        """
        entry = self.contents.remove(key)
        if entry == None:
            return None
        return entry.value

    @synchronized
    def values(self):
        """ generated source for method values """
        theValues = HashSet()
        # BUGFIX: the generated code iterated a bare `contents` name.
        for e in self.contents.values():
            theValues.add(e.value)
        return theValues

    class entrySetMapEntry(Map, Entry, K, V):
        """ generated source for class entrySetMapEntry """
        key = K()
        value = V()

        def __init__(self, k, v):
            """ generated source for method __init__ """
            super(entrySetMapEntry, self).__init__()
            self.key = k
            self.value = v

        def getKey(self):
            """ generated source for method getKey """
            return self.key

        def getValue(self):
            """ generated source for method getValue """
            return self.value

        def setValue(self, value):
            """ generated source for method setValue

            BUGFIX: `return (self.value = value)` is not valid Python;
            assign, then return the newly-set value (matching the generated
            Java `return this.value = value`). NOTE(review): the
            java.util.Map.Entry contract returns the *old* value -- confirm
            which behavior callers expect.
            """
            self.value = value
            return self.value

    @synchronized
    def entrySet(self):
        """ generated source for method entrySet """
        theEntries = HashSet()
        # BUGFIX: the generated code iterated a bare `contents` name.
        for e in self.contents.entrySet():
            theEntries.add(self.entrySetMapEntry(e.getKey(), e.getValue().value))
        return theEntries
class IndexData:
    """Solr indexing rules for a dataset record.

    Driven by the harvester: __activate__ receives a context dict and fills
    `self.index` (the Solr field map) from the object's payloads, workflow
    metadata and security settings.
    """

    def __activate__(self, context):
        # Prepare variables
        self.index = context["fields"]
        self.object = context["object"]
        self.payload = context["payload"]
        self.params = context["params"]
        self.utils = context["pyUtils"]
        self.config = context["jsonConfig"]
        self.log = context["log"]
        self.last_modified = time.strftime("%Y-%m-%dT%H:%M:%SZ", time.gmtime())

        self.log.debug("Indexing Metadata Record '{}' '{}'", self.object.getId(), self.payload.getId())

        # Common data
        self.__newDoc()
        self.packagePid = None
        pidList = self.object.getPayloadIdList()
        for pid in pidList:
            if pid.endswith(".tfpackage"):
                self.packagePid = pid

        # Real metadata -- only the 'object' item carries full metadata;
        # datastream items get just the common fields from __newDoc.
        if self.itemType == "object":
            self.__basicData()
            self.__metadata()
            # Some of the above steps may request some
            # messages be sent, particularly workflows
            self.__messages()

        # Make sure security comes after workflows
        self.__security()

    def __newDoc(self):
        # Core identity fields shared by object and datastream items.
        self.oid = self.object.getId()
        self.pid = self.payload.getId()
        metadataPid = self.params.getProperty("metaPid", "DC")
        self.utils.add(self.index, "storage_id", self.oid)
        if self.pid == metadataPid:
            self.itemType = "object"
        else:
            # Datastreams are indexed under a compound oid/pid identifier.
            self.oid += "/" + self.pid
            self.itemType = "datastream"
            self.utils.add(self.index, "identifier", self.pid)

        self.utils.add(self.index, "id", self.oid)
        self.utils.add(self.index, "item_type", self.itemType)
        self.utils.add(self.index, "last_modified", self.last_modified)
        self.utils.add(self.index, "harvest_config", self.params.getProperty("jsonConfigOid"))
        self.utils.add(self.index, "harvest_rules", self.params.getProperty("rulesOid"))

        self.item_security = []
        self.owner = self.params.getProperty("owner", "guest")
        formatter = SimpleDateFormat('yyyyMMddHHmmss')
        self.params.setProperty("last_modified", formatter.format(Date()))

        self.utils.add(self.index, "date_object_created", self.params.getProperty("date_object_created"))
        self.params.setProperty("date_object_modified", time.strftime("%Y-%m-%dT%H:%M:%SZ", time.localtime()))
        self.utils.add(self.index, "date_object_modified", self.params.getProperty("date_object_modified"))

    def __basicData(self):
        # Repository-level fields taken straight from harvest parameters.
        self.utils.add(self.index, "repository_name", self.params["repository.name"])
        self.utils.add(self.index, "repository_type", self.params["repository.type"])
        if self.params["date_transitioned"] is not None:
            self.utils.add(self.index, "date_transitioned", self.params["date_transitioned"])
        # VITAL integration
        vitalPid = self.params["vitalPid"]
        if vitalPid is not None:
            self.utils.add(self.index, "vitalPid", vitalPid)
        # Persistent Identifiers
        pidProperty = self.config.getString(None, ["curation", "pidProperty"])
        if pidProperty is None:
            self.log.error("No configuration found for persistent IDs!")
        else:
            pid = self.params[pidProperty]
            if pid is not None:
                self.utils.add(self.index, "known_ids", pid)
                self.utils.add(self.index, "pidProperty", pid)
                self.utils.add(self.index, "oai_identifier", pid)
        self.utils.add(self.index, "oai_set", "default")
        # Publication
        published = self.params["published"]
        if published is not None:
            self.utils.add(self.index, "published", "true")

    def __security(self):
        # Reconcile currently-granted roles against the roles the current
        # workflow step allows (self.item_security).
        roles = self.utils.getRolesWithAccess(self.oid)
        if roles is not None:
            # For every role currently with access
            for role in roles:
                # Should show up, but during debugging we got a few
                if role != "":
                    if role in self.item_security:
                        # They still have access
                        self.utils.add(self.index, "security_filter", role)
                    else:
                        # Their access has been revoked
                        self.__revokeRoleAccess(role)
            # Now for every role that the new step allows access
            for role in self.item_security:
                if role not in roles:
                    # Grant access if new
                    self.__grantRoleAccess(role)
                    self.utils.add(self.index, "security_filter", role)
        # No existing security
        else:
            if self.item_security is None:
                # Guest access if none provided so far
                self.__grantRoleAccess("guest")
                # NOTE(review): `role` is undefined on this path -- it looks
                # like this should index "guest"; confirm and fix.
                self.utils.add(self.index, "security_filter", role)
            else:
                # Otherwise use workflow security
                for role in self.item_security:
                    # Grant access if new
                    self.__grantRoleAccess(role)
                    self.utils.add(self.index, "security_filter", role)
        users = self.utils.getUsersWithAccess(self.oid)
        if users is not None:
            # For every role currently with access
            for user in users:
                self.utils.add(self.index, "security_exception", user)
        # Ownership
        if self.owner is None:
            self.utils.add(self.index, "owner", "system")
        else:
            self.utils.add(self.index, "owner", self.owner)

    def __indexList(self, name, values):
        # convert to set so no duplicate values
        for value in HashSet(values):
            self.utils.add(self.index, name, value)

    def __grantRoleAccess(self, newRole):
        schema = self.utils.getAccessSchema()
        schema.setRecordId(self.oid)
        schema.set("role", newRole)
        self.utils.setAccessSchema(schema)

    def __grantUserAccess(self, newUser):
        schema = self.utils.getAccessSchema()
        schema.setRecordId(self.oid)
        schema.set("user", newUser)
        self.utils.setAccessSchema(schema)

    def __revokeRoleAccess(self, oldRole):
        schema = self.utils.getAccessSchema()
        schema.setRecordId(self.oid)
        schema.set("role", oldRole)
        self.utils.removeAccessSchema(schema)

    def __revokeUserAccess(self, oldUser):
        schema = self.utils.getAccessSchema()
        schema.setRecordId(self.oid)
        schema.set("user", oldUser)
        self.utils.removeAccessSchema(schema)

    def __metadata(self):
        # Accumulators filled by __workflow() and indexed below.
        self.title = None
        self.dcType = None
        self.descriptionList = []
        self.creatorList = []
        self.creationDate = []
        self.contributorList = []
        self.approverList = []
        self.formatList = ["application/x-fascinator-package"]
        self.fulltext = []
        self.relationDict = {}
        self.customFields = {}
        self.creatorFullNameMap = HashMap()
        self.grantNumberList = []
        self.arrayBucket = HashMap()
        # Composite fields whose start/end parts get joined with a delimiter.
        self.compFields = ["dc:coverage.vivo:DateTimeInterval", "locrel:prc.foaf:Person"]
        self.compFieldsConfig = {"dc:coverage.vivo:DateTimeInterval": {"delim": " to ", "start": "start", "end": "end"}, "locrel:prc.foaf:Person": {"delim": ", ", "start": "familyName", "end": "givenName"}}
        self.reportingFieldPrefix = "reporting_"
        self.embargoedDate = None
        self.createTimeStamp = None

        # Try our data sources, order matters
        self.__workflow()

        # Some defaults if the above failed
        if self.title is None:
            self.title = "New Dataset"
        if self.formatList == []:
            source = self.object.getPayload(self.packagePid)
            self.formatList.append(source.getContentType())

        # Index our metadata finally
        self.utils.add(self.index, "dc_title", self.title)
        if self.dcType is not None:
            self.utils.add(self.index, "dc_type", self.dcType)
        self.__indexList("dc_creator", self.creatorList)  #no dc_author in schema.xml, need to check
        self.__indexList("dc_contributor", self.contributorList)
        self.__indexList("dc_description", self.descriptionList)
        self.__indexList("dc_format", self.formatList)
        self.__indexList("dc_date", self.creationDate)
        self.__indexList("full_text", self.fulltext)
        for key in self.customFields:
            self.__indexList(key, self.customFields[key])
        for key in self.relationDict:
            self.__indexList(key, self.relationDict[key])
        if self.arrayBucket.size() > 0:
            for arrFldName in self.arrayBucket.keySet():
                if arrFldName.endswith("Person") or arrFldName.replace(self.reportingFieldPrefix, "") in self.compFields:
                    # Person/composite buckets hold per-index maps; index values.
                    self.__indexList(arrFldName, self.arrayBucket.get(arrFldName).values())
                else:
                    self.__indexList(arrFldName, self.arrayBucket.get(arrFldName))
        if self.embargoedDate is not None:
            self.utils.add(self.index, "date_embargoed", self.embargoedDate + "T00:00:00Z")
        if self.createTimeStamp is None:
            self.utils.add(self.index, "create_timestamp", time.strftime("%Y-%m-%dT%H:%M:%SZ", time.localtime()))

    def __workflow(self):
        # Workflow data
        WORKFLOW_ID = "dataset"
        wfChanged = False
        workflow_security = []
        self.message_list = None
        stages = self.config.getJsonSimpleList(["stages"])
        if self.owner == "guest":
            pageTitle = "Submission Request"
            displayType = "submission-request"
            initialStep = 0
        else:
            pageTitle = "Metadata Record"
            displayType = "package-dataset"
            initialStep = 1

        try:
            wfMeta = self.__getJsonPayload("workflow.metadata")
            wfMeta.getJsonObject().put("pageTitle", pageTitle)

            # Are we indexing because of a workflow progression?
            targetStep = wfMeta.getString(None, ["targetStep"])
            if targetStep is not None and targetStep != wfMeta.getString(None, ["step"]):
                wfChanged = True
                # Step change
                wfMeta.getJsonObject().put("step", targetStep)
                wfMeta.getJsonObject().remove("targetStep")
            # This must be a re-index then
            else:
                targetStep = wfMeta.getString(None, ["step"])

            # Security change
            for stage in stages:
                if stage.getString(None, ["name"]) == targetStep:
                    wfMeta.getJsonObject().put("label", stage.getString(None, ["label"]))
                    self.item_security = stage.getStringList(["visibility"])
                    workflow_security = stage.getStringList(["security"])
                    if wfChanged == True:
                        self.message_list = stage.getStringList(["message"])
        except StorageException:
            # No workflow payload, time to create
            initialStage = stages.get(initialStep).getString(None, ["name"])
            wfChanged = True
            wfMeta = JsonSimple()
            wfMetaObj = wfMeta.getJsonObject()
            wfMetaObj.put("id", WORKFLOW_ID)
            wfMetaObj.put("step", initialStage)
            wfMetaObj.put("pageTitle", pageTitle)
            stages = self.config.getJsonSimpleList(["stages"])
            for stage in stages:
                if stage.getString(None, ["name"]) == initialStage:
                    wfMetaObj.put("label", stage.getString(None, ["label"]))
                    self.item_security = stage.getStringList(["visibility"])
                    workflow_security = stage.getStringList(["security"])
                    self.message_list = stage.getStringList(["message"])

        # Has the workflow metadata changed?
        if wfChanged == True:
            inStream = IOUtils.toInputStream(wfMeta.toString(True), "UTF-8")
            try:
                StorageUtils.createOrUpdatePayload(self.object, "workflow.metadata", inStream)
            except StorageException:
                print " ERROR updating dataset payload"

        # Form processing
        coreFields = ["title", "description", "manifest", "metaList", "relationships", "responses"]
        formData = wfMeta.getObject(["formData"])
        if formData is not None:
            formData = JsonSimple(formData)
            # Core fields
            description = formData.getStringList(["description"])
            if description:
                self.descriptionList = description
            # Non-core fields
            data = formData.getJsonObject()
            for field in data.keySet():
                if field not in coreFields:
                    self.customFields[field] = formData.getStringList([field])

        # Manifest processing (formData not present in wfMeta)
        manifest = self.__getJsonPayload(self.packagePid)
        formTitles = manifest.getStringList(["title"])
        if formTitles:
            for formTitle in formTitles:
                if self.title is None:
                    self.title = formTitle
        self.descriptionList = [manifest.getString("", ["description"])]

        #Used to make sure we have a created date
        createdDateFlag = False

        formData = manifest.getJsonObject()
        for field in formData.keySet():
            if field not in coreFields:
                value = formData.get(field)
                if value is not None and value.strip() != "":
                    self.utils.add(self.index, field, value)
                    # We want to sort by date of creation, so it
                    # needs to be indexed as a date (ie. 'date_*')
                    if field == "dc:created":
                        parsedTime = time.strptime(value, "%Y-%m-%d")
                        solrTime = time.strftime("%Y-%m-%dT%H:%M:%SZ", parsedTime)
                        self.utils.add(self.index, "date_created", solrTime)
                        self.log.debug("Set created date to :%s" % solrTime)
                        createdDateFlag = True
                    elif field == "redbox:embargo.dc:date":
                        self.embargoedDate = value
                    elif field == "create_timestamp":
                        self.createTimeStamp = value
                    # try to extract some common fields for faceting
                    if field.startswith("dc:") and \
                            not (field.endswith(".dc:identifier.rdf:PlainLiteral") \
                            or field.endswith(".dc:identifier") \
                            or field.endswith(".rdf:resource")):
                        # index dublin core fields for faceting
                        basicField = field.replace("dc:", "dc_")
                        dot = field.find(".")
                        if dot > 0:
                            facetField = basicField[:dot]
                        else:
                            facetField = basicField
                        #print "Indexing DC field '%s':'%s'" % (field, facetField)
                        if facetField == "dc_title":
                            if self.title is None:
                                self.title = value
                        elif facetField == "dc_type":
                            if self.dcType is None:
                                self.dcType = value
                        elif facetField == "dc_creator":
                            if basicField.endswith("foaf_name"):
                                self.utils.add(self.index, "dc_creator", value)
                        else:
                            self.utils.add(self.index, facetField, value)
                        # index keywords for lookup
                        if field.startswith("dc:subject.vivo:keyword."):
                            self.utils.add(self.index, "keywords", value)
                    # check if this is an array field
                    fnameparts = field.split(":")
                    if fnameparts is not None and len(fnameparts) >= 3:
                        if field.startswith("bibo") or field.startswith("skos"):
                            arrParts = fnameparts[1].split(".")
                        else:
                            arrParts = fnameparts[2].split(".")
                        # we're not interested in: Relationship, Type and some redbox:origin
                        if arrParts is not None and len(arrParts) >= 2 and field.find(":Relationship.") == -1 and field.find("dc:type") == -1 and field.find("redbox:origin") == -1 and arrParts[1].isdigit():
                            # we've got an array field
                            fldPart = ":%s" % arrParts[0]
                            prefixEndIdx = field.find(fldPart) + len(fldPart)
                            suffixStartIdx = prefixEndIdx + len(arrParts[1]) + 1
                            # Strip the numeric array index out of the field name.
                            arrFldName = self.reportingFieldPrefix + field[:prefixEndIdx] + field[suffixStartIdx:]
                            if field.endswith("Name"):
                                arrFldName = self.reportingFieldPrefix + field[:prefixEndIdx]
                            self.log.debug("Array Field name is:%s from: %s, with value:%s" % (arrFldName, field, value))
                            if field.endswith("Name"):
                                # Person-name parts get merged into "family, given".
                                fullFieldMap = self.arrayBucket.get(arrFldName)
                                if fullFieldMap is None:
                                    fullFieldMap = HashMap()
                                    self.arrayBucket.put(arrFldName, fullFieldMap)
                                idx = arrParts[1]
                                fullField = fullFieldMap.get(idx)
                                if (fullField is None):
                                    fullField = ""
                                if (field.endswith("givenName")):
                                    fullField = "%s, %s" % (fullField, value)
                                if (field.endswith("familyName")):
                                    fullField = "%s%s" % (value, fullField)
                                self.log.debug("fullname now is :%s" % fullField)
                                fullFieldMap.put(idx, fullField)
                            else:
                                fieldlist = self.arrayBucket.get(arrFldName)
                                if fieldlist is None:
                                    fieldlist = []
                                    self.arrayBucket.put(arrFldName, fieldlist)
                                fieldlist.append(value)
                    for compfield in self.compFields:
                        if field.startswith(compfield):
                            # Composite start/end parts joined by the configured delimiter.
                            arrFldName = self.reportingFieldPrefix + compfield
                            fullFieldMap = self.arrayBucket.get(arrFldName)
                            if fullFieldMap is None:
                                fullFieldMap = HashMap()
                                self.arrayBucket.put(arrFldName, fullFieldMap)
                            fullField = fullFieldMap.get("1")
                            if fullField is None:
                                fullField = ""
                            if field.endswith(self.compFieldsConfig[compfield]["end"]):
                                fullField = "%s%s%s" % (fullField, self.compFieldsConfig[compfield]["delim"], value)
                            if field.endswith(self.compFieldsConfig[compfield]["start"]):
                                fullField = "%s%s" % (value, fullField)
                            self.log.debug("full field now is :%s" % fullField)
                            fullFieldMap.put("1", fullField)

        self.utils.add(self.index, "display_type", displayType)
        # Make sure we have a creation date
        if not createdDateFlag:
            self.utils.add(self.index, "date_created", self.last_modified)
            self.log.debug("Forced creation date to %s because it was not explicitly set." % self.last_modified)

        # Workflow processing
        wfStep = wfMeta.getString(None, ["step"])
        self.utils.add(self.index, "workflow_id", wfMeta.getString(None, ["id"]))
        self.utils.add(self.index, "workflow_step", wfStep)
        self.utils.add(self.index, "workflow_step_label", wfMeta.getString(None, ["label"]))
        for group in workflow_security:
            self.utils.add(self.index, "workflow_security", group)
        if self.owner is not None:
            self.utils.add(self.index, "workflow_security", self.owner)
        # set OAI-PMH status to deleted
        if wfStep == "retired":
            self.utils.add(self.index, "oai_deleted", "true")

    def __messages(self):
        # Send the oid to every message target the workflow stage requested.
        if self.message_list is not None and len(self.message_list) > 0:
            msg = JsonSimple()
            msg.getJsonObject().put("oid", self.oid)
            message = msg.toString()
            for target in self.message_list:
                self.utils.sendMessage(target, message)

    def __getJsonPayload(self, pid):
        # Load a payload and parse it as JSON; always closes the payload.
        payload = self.object.getPayload(pid)
        json = self.utils.getJsonObject(payload.open())
        payload.close()
        return json
def resolveHostForIP(host): hostName = "NONE" # Correction for if the entry in the server.xml is an IP address try: hName = Inet4Address.getByName(host) hostName = hName.getHostName() print " Resolved Host " + host + " to " + hostName if hostName != host: return hostName except UnknownHostException, e: print " Couldn't resolve " + host + " to a host name" # Correction for if the entries in the list are IP addresses iterator = agentMap.keySet().iterator() while iterator.hasNext(): try: agentHost = iterator.next() hName = Inet4Address.getByName(agentHost) hName = hName.getHostName() if checkForSimilarity(host, hName): print " Detected match for IP of " + host + " and MiddleWare Agent " + agentHost return agentHost except UnknownHostException, e: print " Error resolving Host - UnkownHostException" return host ############# Check for Similarity Logic ################## def checkForSimilarity(hostOne, hostTwo):
def getInstallPathUnix(webserver_name, shellUtils): path = '/opt/sadmin/sweapp/bin' if string.find(webserver_name, 'Netscape-Enterprise') >= 0: data = shellUtils.execCmd('ps -ef | grep ns-http' ) #@@CMD_PERMISION siebel protocol execution rows = string.split(data, '\n') # can be more than one process for each server - keep only one path paths = HashMap() for row in rows: pattern = Pattern('\s*-d\s*([^\s]*)') match = pattern.matcher(row) if match.find() == 1: configPath = match.group(1) paths.put(configPath, configPath) it = paths.keySet().iterator() while it.hasNext(): path = it.next() confFile = None confFilePath = path + '/obj.conf' try: confFile = shellUtils.safecat(confFilePath) if not confFile: raise ValueError except: logger.debug("Failed reading config file '%s'" % confFilePath) else: pattern = Pattern('\s*dir\s*=\s*"([^\s]*sweapp[^\s/]*)') match = pattern.matcher(confFile) if match.find() == 1: path = match.group(1) if path != '': path = path + '/bin' break else: data = shellUtils.execCmd( 'ps -ef | grep httpd') #@@CMD_PERMISION siebel protocol execution paths = HashMap() pattern = Pattern('\s*-d\s*([^\s]*)') match = pattern.matcher(data) while match.find() == 1: configPath = match.group(1) paths.put(configPath, configPath) logger.debug(paths) it = paths.keySet().iterator() while it.hasNext(): path = it.next() configFilePath = path + '/conf/httpd.conf' confFile = None try: confFile = shellUtils.safecat(configFilePath) if not confFile: raise ValueError except: logger.debug("Failed reading config file '%s'" % configFilePath) else: pattern = Pattern('\sSiebelHome\s*([^\s]*)') match = pattern.matcher(confFile) if match.find() == 1: path = match.group(1) if path != '': path = path + '/bin' break return path