def getPublishersFromDistributor(self, oshv, distributor, distributorDatabaseName, sqlServerId):
    #check if i am a distributor first
    rs = self.connection.doCall('exec sp_helpdistpublisher')
    publishers = HashMap()
    sqlServers = HashMap()
    while rs.next():
        publisherName = rs.getString('name')
        publisher = ObjectStateHolder('sqlserverpublisher')
        sqlServer = self.createSqlServer(publisherName, oshv, sqlServerId)
        publisher.setContainer(sqlServer)
        publisher.setAttribute(Queries.DATA_NAME, publisherName)
        publishers.put(publisherName, publisher)
        sqlServers.put(publisherName, sqlServer)
        oshv.add(sqlServer)
        oshv.add(publisher)
        oshv.add(modeling.createLinkOSH('dblink', publisher, distributor))  #add the dblink between the distributor and the publisher
    rs.close()
    if publishers.size() == 0:
        return
    #for each publisher get the published dbs
    workingDatabase = self.connection.getWorkingDatabase()
    self.connection.setWorkingDatabase(distributorDatabaseName)
    itr = publishers.keySet().iterator()
    while itr.hasNext():
        publisherName = itr.next()
        publisher = publishers.get(publisherName)
        sqlServer = sqlServers.get(publisherName)
        self.getPublications(publisherName, sqlServer, publisher, oshv, sqlServerId)
    self.connection.setWorkingDatabase(workingDatabase)
def recordTemplates(self, description):
    """ generated source for method recordTemplates """
    templates = HashMap()
    for gdl in description:
        # The generated code lost these bindings; assuming each gdl is a rule
        # indexed by the name of its head sentence (GDL-style description).
        rule = gdl
        name = rule.getHead().__name__
        if not name.getValue() == "base":
            if not templates.containsKey(name):
                templates.put(name, ArrayList())
            templates.get(name).add(rule)
    return templates
def getNextStates_0(self, state, role):
    """ generated source for method getNextStates_0 """
    nextStates = HashMap()
    roleIndices = getRoleIndices()
    for moves in getLegalJointMoves(state):
        # The generated code lost this binding; assuming the map is keyed by
        # the given role's move within each legal joint move.
        move = moves.get(roleIndices.get(role))
        if not nextStates.containsKey(move):
            nextStates.put(move, ArrayList())
        nextStates.get(move).add(self.getNextState(state, moves))
    return nextStates
class Substitution(object):
    """ generated source for class Substitution """
    contents = Map()

    def __init__(self):
        """ generated source for method __init__ """
        self.contents = HashMap()

    def compose(self, thetaPrime):
        """ generated source for method compose """
        result = Substitution()
        result.contents.putAll(self.contents)
        result.contents.putAll(thetaPrime.contents)
        return result

    def contains(self, variable):
        """ generated source for method contains """
        return self.contents.containsKey(variable)

    def equals(self, o):
        """ generated source for method equals """
        if (o != None) and isinstance(o, (Substitution, )):
            return o.contents == self.contents
        return False

    def get(self, variable):
        """ generated source for method get """
        return self.contents.get(variable)

    def hashCode(self):
        """ generated source for method hashCode """
        return self.contents.hashCode()

    def put(self, variable, term):
        """ generated source for method put """
        self.contents.put(variable, term)

    #
    # * Creates an identical substitution.
    # *
    # * @return A new, identical substitution.
    #
    def copy(self):
        """ generated source for method copy """
        copy = Substitution()
        copy.contents.putAll(self.contents)
        return copy

    def __str__(self):
        """ generated source for method toString """
        sb = StringBuilder()
        sb.append("{ ")
        for variable in self.contents.keySet():
            sb.append(variable + "/" + self.contents.get(variable) + " ")
        sb.append("}")
        return sb.__str__()
def recordLegalPropositions(self):
    """ generated source for method recordLegalPropositions """
    legalPropositions = HashMap()
    for proposition in propositions:
        if not isinstance(proposition.__name__, (GdlRelation, )):
            continue
        relation = proposition.__name__
        if relation.__name__.getValue() == "legal":
            # The generated code lost this binding; assuming the role is the
            # first argument of the legal relation.
            r = Role(relation.get(0))
            if not legalPropositions.containsKey(r):
                legalPropositions.put(r, HashSet())
            legalPropositions.get(r).add(proposition)
    return legalPropositions
def sort_deployeds():
    deployeds_per_key = HashMap()
    for _delta in deltas.deltas:
        deployed = _delta.deployedOrPrevious
        if deployed.type == "rh.TomcatWARModule":
            key = "%s___%s" % (deployed.appName, deployed.container.name)
            if deployeds_per_key.containsKey(key):
                deployeds_per_key.get(key).add(deployed)
            else:
                result = HashSet()
                result.add(deployed)
                deployeds_per_key.put(key, result)
    return deployeds_per_key
def recordGoalPropositions(self):
    """ generated source for method recordGoalPropositions """
    goalPropositions = HashMap()
    for proposition in propositions:
        # Skip all propositions that aren't GdlRelations.
        if not isinstance(proposition.__name__, (GdlRelation, )):
            continue
        relation = proposition.__name__
        if not relation.__name__.getValue() == "goal":
            continue
        # The generated code lost this binding; assuming the role is the first
        # argument of the goal relation.
        theRole = Role(relation.get(0))
        if not goalPropositions.containsKey(theRole):
            goalPropositions.put(theRole, HashSet())
        goalPropositions.get(theRole).add(proposition)
    return goalPropositions
def updateLocalRecordRelations(self, jobItems):
    oidIdentifierMap = HashMap()
    for jobItem in jobItems:
        oidIdentifierMap.put(jobItem.get("oid"), jobItem.get("required_identifiers")[0].get("identifier"))
    for jobItem in jobItems:
        type = jobItem.get("type")
        targetSystem = self.systemConfig.getString(None, "curation", "supported-types", type)
        if targetSystem == "redbox":
            oid = jobItem.get("oid")
            digitalObject = StorageUtils.getDigitalObject(self.services.getStorage(), oid)
            tfPackagePid = self.getPackageData(digitalObject)
            metadataJsonPayload = digitalObject.getPayload(tfPackagePid)
            metadataJsonInstream = metadataJsonPayload.open()
            metadataJson = JsonSimple(metadataJsonInstream)
            metadataJsonPayload.close()
            relationships = metadataJson.getArray("relationships")
            if relationships is not None:
                for relationship in relationships:
                    system = relationship.get("system")
                    # Only query external (non-redbox) systems; the original
                    # "or" condition here was always true.
                    if system != "redbox" and system is not None:
                        url = self.systemConfig.getString("can't find it", "curation", "external-system-urls", "get-oid-for-identifier", system)
                        client = BasicHttpClient(url + "&identifier=" + relationship.get("identifier"))
                        get = GetMethod(url + "&identifier=" + relationship.get("identifier"))
                        client.executeMethod(get)
                        if get.getStatusCode() == 200:
                            response = JsonSimple(get.getResponseBodyAsString())
                            relationship.put("curatedPid", oidIdentifierMap.get(response.getString(None, "oid")))
                            relationship.put("isCurated", True)
                            #Now update the relationship on Mint's side
                            break
            istream = ByteArrayInputStream(String(metadataJson.toString(True)).getBytes())
            StorageUtils.createOrUpdatePayload(digitalObject, tfPackagePid, istream)
def getMappedUser(self, configurationAttributes, requestParameters, saml_response_attributes):
    # Convert Saml result attributes keys to lower case
    saml_response_normalized_attributes = HashMap()
    for saml_response_attribute_entry in saml_response_attributes.entrySet():
        saml_response_normalized_attributes.put(StringHelper.toLowerCase(saml_response_attribute_entry.getKey()), saml_response_attribute_entry.getValue())

    currentAttributesMapping = self.prepareCurrentAttributesMapping(self.attributesMapping, configurationAttributes, requestParameters)
    print "Asimba. Get mapped user. Using next attributes mapping '%s'" % currentAttributesMapping

    newUser = User()

    # Set custom object classes
    if self.userObjectClasses != None:
        print "Asimba. Get mapped user. User custom objectClasses to add persons: '%s'" % Util.array2ArrayList(self.userObjectClasses)
        newUser.setCustomObjectClasses(self.userObjectClasses)

    for attributesMappingEntry in currentAttributesMapping.entrySet():
        idpAttribute = attributesMappingEntry.getKey()
        localAttribute = attributesMappingEntry.getValue()
        if self.debugEnrollment:
            print "Asimba. Get mapped user. Trying to map '%s' into '%s'" % (idpAttribute, localAttribute)

        localAttributeValue = saml_response_normalized_attributes.get(idpAttribute)
        if localAttributeValue != None:
            if self.debugEnrollment:
                print "Asimba. Get mapped user. Setting attribute '%s' value '%s'" % (localAttribute, localAttributeValue)
            newUser.setAttribute(localAttribute, localAttributeValue)
        else:
            if newUser.getAttribute(localAttribute) == None:
                newUser.setAttribute(localAttribute, ArrayList())

    return newUser
class CyclicTypeRecorder(object):
    def __init__(self):
        self.count = 0
        self.elements = HashMap()
        self.used = HashSet()

    def push(self, t):
        self.count += 1
        self.elements[t] = self.count
        return self.count

    def pop(self, t):
        del self.elements[t]
        if t in self.used:
            self.used.remove(t)

    def visit(self, t):
        i = self.elements.get(t)
        if i is not None:
            self.used.add(t)
        return i

    def isUsed(self, t):
        return t in self.used
def getMappedAllAttributesUser(self, saml_response_attributes):
    user = User()

    # Set custom object classes
    if self.userObjectClasses != None:
        print "Saml. Get mapped all attributes user. User custom objectClasses to add persons: '%s'" % Util.array2ArrayList(self.userObjectClasses)
        user.setCustomObjectClasses(self.userObjectClasses)

    # Prepare map to do quick mapping
    attributeService = AttributeService.instance()
    ldapAttributes = attributeService.getAllAttributes()
    samlUriToAttributesMap = HashMap()
    for ldapAttribute in ldapAttributes:
        saml2Uri = ldapAttribute.getSaml2Uri()
        if (saml2Uri == None):
            saml2Uri = attributeService.getDefaultSaml2Uri(ldapAttribute.getName())
        samlUriToAttributesMap.put(saml2Uri, ldapAttribute.getName())

    customAttributes = ArrayList()
    for key in saml_response_attributes.keySet():
        ldapAttributeName = samlUriToAttributesMap.get(key)
        if ldapAttributeName == None:
            print "Saml. Get mapped all attributes user. Skipping saml attribute: '%s'" % key
            continue
        if StringHelper.equalsIgnoreCase(ldapAttributeName, "uid"):
            continue
        attribute = CustomAttribute(ldapAttributeName)
        attribute.setValues(saml_response_attributes.get(key))
        customAttributes.add(attribute)

    user.setCustomAttributes(customAttributes)
    return user
def getMappedUser(self, configurationAttributes, requestParameters, saml_response_attributes):
    # Convert Saml result attributes keys to lower case
    saml_response_normalized_attributes = HashMap()
    for saml_response_attribute_entry in saml_response_attributes.entrySet():
        saml_response_normalized_attributes.put(StringHelper.toLowerCase(saml_response_attribute_entry.getKey()), saml_response_attribute_entry.getValue())

    currentAttributesMapping = self.prepareCurrentAttributesMapping(self.attributesMapping, configurationAttributes, requestParameters)
    print "Saml. Get mapped user. Using next attributes mapping '%s'" % currentAttributesMapping

    newUser = User()

    # Set custom object classes
    if self.userObjectClasses != None:
        print "Saml. Get mapped user. User custom objectClasses to add persons: '%s'" % Util.array2ArrayList(self.userObjectClasses)
        newUser.setCustomObjectClasses(self.userObjectClasses)

    for attributesMappingEntry in currentAttributesMapping.entrySet():
        idpAttribute = attributesMappingEntry.getKey()
        localAttribute = attributesMappingEntry.getValue()
        if self.debugEnrollment:
            print "Saml. Get mapped user. Trying to map '%s' into '%s'" % (idpAttribute, localAttribute)

        localAttributeValue = saml_response_normalized_attributes.get(idpAttribute)
        if (localAttributeValue != None):
            if self.debugEnrollment:
                print "Saml. Get mapped user. Setting attribute '%s' value '%s'" % (localAttribute, localAttributeValue)
            newUser.setAttribute(localAttribute, localAttributeValue)

    return newUser
def getMappedAllAttributesUser(self, saml_response_attributes):
    user = User()

    # Set custom object classes
    if self.userObjectClasses != None:
        print "Asimba. Get mapped all attributes user. User custom objectClasses to add persons: '%s'" % Util.array2ArrayList(self.userObjectClasses)
        user.setCustomObjectClasses(self.userObjectClasses)

    # Prepare map to do quick mapping
    attributeService = CdiUtil.bean(AttributeService)
    ldapAttributes = attributeService.getAllAttributes()
    samlUriToAttributesMap = HashMap()
    for ldapAttribute in ldapAttributes:
        saml2Uri = ldapAttribute.getSaml2Uri()
        if saml2Uri == None:
            saml2Uri = attributeService.getDefaultSaml2Uri(ldapAttribute.getName())
        samlUriToAttributesMap.put(saml2Uri, ldapAttribute.getName())

    customAttributes = ArrayList()
    for key in saml_response_attributes.keySet():
        ldapAttributeName = samlUriToAttributesMap.get(key)
        if ldapAttributeName == None:
            print "Asimba. Get mapped all attributes user. Skipping saml attribute: '%s'" % key
            continue
        if StringHelper.equalsIgnoreCase(ldapAttributeName, "uid"):
            continue
        attribute = CustomAttribute(ldapAttributeName)
        attribute.setValues(saml_response_attributes.get(key))
        customAttributes.add(attribute)

    user.setCustomAttributes(customAttributes)
    return user
class FacetList:
    def __init__(self, name, json):
        self.__facetMap = HashMap()
        self.__facetList = ArrayList()
        entries = json.getList("facet_counts/facet_fields/" + name)
        for i in range(0, len(entries), 2):
            value = entries[i]
            count = entries[i+1]
            if count > 0:
                facet = Facet(name, value, count)
                self.__facetMap.put(value, facet)
                slash = value.rfind("/")
                if slash == -1:
                    self.__facetList.add(facet)
                else:
                    parent = self.__getFacet(value[:slash])
                    if parent is not None:
                        parent.addSubFacet(facet)

    def __getFacet(self, name):
        return self.__facetMap.get(name)

    def getJsonList(self):
        jsonList = ArrayList()
        for facets in self.__facetList:
            jsonList.add(facets.getJson())
        return jsonList
class FacetList:
    def __init__(self, name, results):
        self.__facetMap = HashMap()
        self.__facetList = ArrayList()
        facets = results.getFacets()
        if facets is None:
            return
        facet = facets.get(name)
        if facet is None:
            return
        facetData = facet.values()
        for value in facetData.keySet():
            count = facetData.get(value)
            facet = Facet(name, value, count)
            self.__facetMap.put(value, facet)
            slash = value.rfind("/")
            if slash == -1:
                self.__facetList.add(facet)
            else:
                parent = self.__getFacet(value[:slash])
                if parent is not None:
                    parent.addSubFacet(facet)

    def __getFacet(self, name):
        return self.__facetMap.get(name)

    def getJsonList(self):
        jsonList = ArrayList()
        for facets in self.__facetList:
            jsonList.add(facets.getJson())
        return jsonList
def setupCoreGroupBridgeMeshed(coregroups, servers):
    cgbsettings = convertToList(AdminConfig.list("CoreGroupBridgeSettings"))[0]
    #
    # create accesspoints and accesspointgroups (one per node)
    #
    nodes = convertToList(AdminConfig.list("Node"))
    cgapHash = HashMap()
    cgaps = []
    #
    # enumerate existing cgaps and delete existing bridge interfaces
    #
    acCgaps = convertToList(AdminConfig.list("CoreGroupAccessPoint"))
    for cgap in acCgaps:
        cgaps.append(cgap)
        cgapname = AdminConfig.showAttribute(cgap, "name")
        coregroupname = AdminConfig.showAttribute(cgap, "coreGroup")
        #print "Existing Core Group Access Point: "+cgapname+"("+coregroupname+")"
        cgapHash.put(coregroupname, cgap)
        #
        # delete existing BridgeInterfaces
        #
        print "Removing Bridge Interfaces for existing Core Group Access Point:", cgapname, "(", coregroupname, ")"
        acBI = convertToList(AdminConfig.list("BridgeInterface", cgap))
        for bi in acBI:
            AdminConfig.remove(bi)
    #
    # create any new cgap and recreate bridgeinterfaces
    #
    print "Creating new Core Group Access Points and Bridge Interfaces"
    for node in nodes:
        nodeName = AdminConfig.showAttribute(node, "name")
        nodeservers = convertToList(AdminConfig.list("Server", node))
        for server in nodeservers:
            serverName = AdminConfig.showAttribute(server, "name")
            serverType = AdminConfig.showAttribute(server, "serverType")
            srv = servers.get(nodeName + "/" + serverName)
            coregroup = getCoreGroupForServer(srv, coregroups, clusters)
            if (srv != None and srv.isBridge == 1):
                cgapName = coregroup.name + "_" + "CGAP"
                cgap = cgapHash.get(coregroup.name)
                if (cgap == None):
                    print " Creating Core Group Access Point '", cgapName, "' for coregroup '", coregroup.name, "'"
                    cgap = AdminConfig.create("CoreGroupAccessPoint", cgbsettings, [["name", cgapName], ["coreGroup", coregroup.name]])
                    cgaps.append(cgap)
                    cgapHash.put(coregroup.name, cgap)
                print " Creating Bridge Interface (node=" + nodeName + ", server=" + serverName + ") for Core Group Access Point '", cgap, "'"
                AdminConfig.create("BridgeInterface", cgap, [["node", nodeName], ["server", serverName], ["chain", "DCS"]])
    apg = convertToList(AdminConfig.list("AccessPointGroup"))[0]
    if (apg == "" or apg == None):
        apg = AdminConfig.create("AccessPointGroup", cgbsettings, [["name", "DefaultAccessPointGroup"], ["coreGroupAccessPointRefs", cgaps]])
    else:
        AdminConfig.modify(apg, [["coreGroupAccessPointRefs", cgaps]])
class ProverCache(object):
    """ generated source for class ProverCache """
    contents = Map()

    def __init__(self):
        """ generated source for method __init__ """
        self.contents = HashMap()

    #
    # * NOTE: The given sentence must have been renamed with a VariableRenamer.
    #
    def contains(self, renamedSentence):
        """ generated source for method contains """
        return self.contents.containsKey(renamedSentence)

    def get(self, sentence, varRenamedSentence):
        """ generated source for method get """
        cacheContents = self.contents.get(varRenamedSentence)
        if cacheContents == None:
            return None
        results = HashSet()
        for answer in cacheContents:
            results.add(Unifier.unify(sentence, answer))
        return ArrayList(results)

    def put(self, sentence, renamedSentence, answers):
        """ generated source for method put """
        results = HashSet()
        for answer in answers:
            results.add(Substituter.substitute(sentence, answer))
        self.contents.put(renamedSentence, results)
def logicalComponents(systemNames, solman): r'@types: list[str], saputils.SapSolman -> HashMap[str, list]' namesCount = len(systemNames) pageSize = 50 pageOffset = 0 product2sysname = HashMap() while (pageOffset < namesCount): pageEndPosition = pageOffset + pageSize if pageEndPosition > namesCount: pageEndPosition = namesCount namesForQuery = systemNames[pageOffset:pageEndPosition] pageOffset = pageOffset + pageSize names = ArrayList(len(namesForQuery)) for name in namesForQuery: names.add(name) try: result = solman.execute('SMSY_SYSTEMS', 'SYSTEMNAME, PRODUCT', 'SYSTEMNAME', names) while result.next(): product = result.getString("PRODUCT") system = result.getString("SYSTEMNAME") if saputils.isEmptyValue(product) or saputils.isEmptyValue( system): continue systemsList = product2sysname.get(product.lower()) if systemsList is None: systemsList = ArrayList() product2sysname.put(product.lower(), systemsList) systemsList.add(system.lower()) except (Exception, JException): logger.warnException("Failed to get products") component2system = HashMap() result = solman.execute('SMSY_LOG_COMP', 'LOG_COMP, PRODUCT') while result.next(): product = result.getString("PRODUCT") component = result.getString("LOG_COMP") if saputils.isEmptyValue(product) or saputils.isEmptyValue(component): continue systemsList = product2sysname.get(product.lower()) if systemsList is not None: component2system.put(component.lower(), systemsList) return component2system
def processLinks(allLinksChildren, client, ciDictionary, objectMappings, linkMappings, action, updateStatus): iter = allLinksChildren.iterator() while iter.hasNext(): linkElement = iter.next() end1Id = None end2Id = None table = linkElement.getAttributeValue("targetRelationshipClass") attributesMap = HashMap() fieldChildren = linkElement.getChildren("field") if fieldChildren is not None: iter2 = fieldChildren.iterator() while iter2.hasNext(): fieldElement = iter2.next() fieldName = fieldElement.getAttributeValue("name") fieldValue = fieldElement.getText() if fieldName == "end1Id": end1Id = fieldValue attributesMap.put("END1", ciDictionary.get(fieldValue)) elif fieldName == "end2Id": end2Id = fieldValue attributesMap.put("END2", ciDictionary.get(fieldValue)) elif fieldName != "DiscoveryID1" and fieldName != "DiscoveryID2": attributesMap.put(fieldName, fieldValue) isKey = fieldElement.getAttributeValue("key") end1ExternalId = CmdbObjectID.Factory.restoreObjectID(end1Id) newId1 = end1ExternalId.getPropertyValue("ID") if newId1 is None: cmdb1Id = end1ExternalId.getPropertyValue("internal_id") end1ExternalId = objectMappings.get(cmdb1Id) end2ExternalId = CmdbObjectID.Factory.restoreObjectID(end2Id) newId2 = end2ExternalId.getPropertyValue("ID") if newId2 is None: cmdb2Id = end2ExternalId.getPropertyValue("internal_id") end2ExternalId = objectMappings.get(cmdb2Id) id = linkElement.getAttributeValue("id") linkObjId = CmdbLinkID.Factory.restoreLinkID(id) newId = linkObjId.getPropertyValue("ID") externalId = None if newId is None: cmdbId = linkObjId.getPropertyValue("internal_id") className = linkObjId.getType() newId = str(client.getNextCounter()) attributesMap.put("ID", newId) propArray = [TypesFactory.createProperty("ID", attributesMap.get("ID"))] externalId = ExternalIdFactory.createExternalRelationId( className, end1ExternalId, end2ExternalId, propArray ) linkMappings.put(cmdbId, externalId) else: logger.info("linkObjId is ", linkObjId.getPropertyValue("ID"), " newId: ", newId) externalId = ExternalIdUtil.restoreExternalRelationId(linkObjId.toString()) attributesMap.put("ID", newId) if action == ADD: client.executeInsert(table, attributesMap, updateStatus, externalId) elif action == UPDATE: client.executeUpdate(table, attributesMap, newId, updateStatus, externalId) elif action == DELETE: client.executeDelete(table, attributesMap, newId, updateStatus, externalId)
def updateAttributes(self, context, configurationAttributes):
    print "Idp extension. Method: updateAttributes"
    attributeContext = context.getAttributeContext()

    customAttributes = HashMap()
    customAttributes.putAll(attributeContext.getIdPAttributes())

    # Remove givenName attribute
    customAttributes.remove("givenName")

    # Update surname attribute
    if customAttributes.containsKey("sn"):
        customAttributes.get("sn").setValues(ArrayList(Arrays.asList(StringAttributeValue("Dummy"))))

    # Set updated attributes
    attributeContext.setIdPAttributes(customAttributes.values())

    return True
def getAgates(shellUtils, installpath, sapitsOSH, OSHVResult): mapInstanceNameToAgate = HashMap() filePath = installpath + '\\config\\ItsRegistryWGATE.xml' data = shellUtils.safecat(filePath) logger.debug('got ItsRegistryWGATE file') if data == None or error(data): logger.error('Got: [', data, '] when performing command [ safecat ', filePath, '] - terminating script') else: builder = SAXBuilder(0) doc = builder.build(StringReader(data)) root = doc.getRootElement() localWgates = getElementByAttrValue(root, 'key', 'name', 'LocalWgates') wgates = localWgates.getChildren() it = wgates.iterator() while it.hasNext(): wgate = it.next() value = wgate.getAttributeValue('name') if value.find('WGATE_') >= 0: instancesRoot = getElementByAttrValue(wgate, 'key', 'name', 'Instances') instances = instancesRoot.getChildren() itInstances = instances.iterator() while itInstances.hasNext(): instance = itInstances.next() instanceName = instance.getAttributeValue('name') logger.debug(instanceName) agatesRoot = getElementByAttrValue(instance, 'key', 'name', 'Agates') agates = agatesRoot.getChildren() itAgates = agates.iterator() while itAgates.hasNext(): agate = itAgates.next() agateHost = getElementByAttrValue( agate, 'value', 'name', 'Host') host = agateHost.getText() agates = mapInstanceNameToAgate.get(instanceName) if agates == None: agates = ArrayList() mapInstanceNameToAgate.put(instanceName, agates) try: ip = netutils.getHostAddress(host) hostOSH = modeling.createHostOSH(ip) OSHVResult.add(hostOSH) agateOSH = modeling.createApplicationOSH( 'sap_its_agate', 'ITS_AGATE_' + ip, hostOSH) OSHVResult.add(agateOSH) agates.add(agateOSH) except: logger.warn('Failed resolving IP for agate host ', host) return mapInstanceNameToAgate
class KnowledgeBase(object):
    """ generated source for class KnowledgeBase """
    contents = Map()

    def __init__(self, description):
        """ generated source for method __init__ """
        self.contents = HashMap()
        for gdl in description:
            # The generated code lost these bindings; assuming each gdl is
            # treated as a rule and indexed by the name of its head sentence.
            rule = gdl
            key = rule.getHead().__name__
            if not self.contents.containsKey(key):
                self.contents.put(key, ArrayList())
            self.contents.get(key).add(rule)

    @synchronized
    def fetch(self, sentence):
        """ generated source for method fetch """
        key = sentence.__name__
        if self.contents.containsKey(key):
            return self.contents.get(key)
        else:
            return ArrayList()
def main():
    fileType = ""
    newVars = []

    # We will only update values for existing variables. We do not create new ones
    allExistingVars = releaseApi.getVariables(release.id)

    # If it is a file type we can process, place in iterator
    filesToProcessItr = filter(
        lambda x: os.path.splitext(x)[1] in (listOfPropertiesTypes + listOfYamlTypes + listOfXmlTypes),
        fileNameList)
    logging.debug("The filtered list = %s" % list(filesToProcessItr))

    for fileName in filesToProcessItr:
        fileType = os.path.splitext(fileName)[1]
        url = targetURL.replace(":filename:", fileName)
        logging.debug("The new URL is " + url)
        data = getData(url)
        # If no data was returned, skip this file
        if len(data) == 0:
            continue

        if fileType in listOfYamlTypes:
            newVars = YamlParser.getVariablesList(data)
        elif fileType in listOfPropertiesTypes:
            newVars = PropertiesParser.getVariablesList(data)
        elif fileType in listOfXmlTypes:
            newVars = XmlParser.getVariablesList(data)
            logging.debug("Finished with the xml parser")
        else:
            # Unknown file type - abort
            logging.error("Unknown file type: " + fileType)
            sys.exit(1)

        # put the new vars in a map indexed by key(name)
        newVarsMap = HashMap()
        for dynamicVar in newVars.getVariables():
            newVarsMap.put(dynamicVar.getKey(), dynamicVar)

        for var in allExistingVars:
            # Make sure this is an existing Release Variable
            newVar = newVarsMap.get(var.key)
            # If this is from a properties file (where we have no type) or it is from a yaml file and type matches
            if newVar and (fileType in listOfPropertiesTypes or newVar.getType() == var.type):
                var.value = newVar.getValue()
                releaseApi.updateVariable(var)
                if var.type != "xlrelease.PasswordStringVariable":
                    print var.key + "=" + str(var.value) + "\n"
                else:
                    print var.key + "=" + "*******" + "\n"
def readDataCSV(fp):
    reader = CSVReader(FileReader(fp), ",")
    ls = reader.readAll()
    tracks = HashMap()
    for idx, item in enumerate(ls):
        if idx > 0:
            if tracks.containsKey(int(item[0])):
                pass
            else:
                track = [[], [], [], []]
                tracks.put(int(item[0]), track)  # trackID in str.
            track = tracks.get(int(item[0]))
            track[0].append(int(item[1]))  # frame 0
            track[1].append(item[3])  # x 1
            track[2].append(item[4])  # y 2
            roi = Roi(int(item[5]), int(item[6]), int(item[7]), int(item[8]))
            track[3].append(roi)  # a roi 3
    return tracks
def getProcesses(client, nameToHostOSH, OSHVResult, schemaName=None, viewSchemaName=None): # [queue id] <--> [ list of process OSH ] mapIDToOSH = HashMap() objs = __assocWithSchemaName(['FND_CONCURRENT_PROCESSES'], schemaName, viewSchemaName) resultSet = client.executeQuery( 'SELECT CONCURRENT_QUEUE_ID,OS_PROCESS_ID,NODE_NAME FROM %s WHERE PROCESS_STATUS_CODE=\'A\' or PROCESS_STATUS_CODE=\'C\'' % objs) #@@CMD_PERMISION sql protocol execution while resultSet.next(): queueID = resultSet.getString(1) PID = resultSet.getString(2) host = resultSet.getString(3) hostNoDomain = host.split('.')[0] if logger.isDebugEnabled(): logger.debug('-------------------------------------------------') logger.debug('queueID = ', queueID) logger.debug('PID = ', PID) logger.debug('host = ', host) logger.debug('hostNoDomain = ', hostNoDomain) logger.debug('-------------------------------------------------') hostOSH = nameToHostOSH.get(host) if hostOSH == None: hostOSH = nameToHostOSH.get(hostNoDomain) if hostOSH != None: processOSH = ObjectStateHolder('process') processOSH.setAttribute('process_pid', int(PID)) processOSH.setContainer(hostOSH) OSHVResult.add(processOSH) processes = mapIDToOSH.get(queueID) if processes == None: processes = ArrayList() mapIDToOSH.put(queueID, processes) processes.add(processOSH) if logger.isDebugEnabled(): logger.debug('-------------------------------------------------') logger.debug(mapIDToOSH) logger.debug('-------------------------------------------------') resultSet.close() return mapIDToOSH
def sumBalancesByCurrency(self, book):
    # print "sumBalancesByCurrency "
    import java.util.HashMap as HashMap
    import com.infinitekind.moneydance.model.AccountUtil as AccountUtil
    import com.infinitekind.moneydance.model.AcctFilter as AcctFilter
    import com.infinitekind.moneydance.model.Account.AccountType as AccountType

    totals = HashMap()  # HashMap<CurrencyType, Long>
    for acct in AccountUtil.allMatchesForSearch(book.getRootAccount(), AcctFilter.ACTIVE_ACCOUNTS_FILTER):
        curr = acct.getCurrencyType()
        total = totals.get(curr)  # this returns None if curr doesn't exist yet
        if acct.getCurrentBalance() != 0 and acct.getAccountType() == AccountType.SECURITY:
            # we only want Securities with holdings
            pass
        else:
            continue  # no sense slowing everything down with stuff we don't need
        total = (0L if (total == None) else total) + acct.getCurrentBalance()
        # ? java: total = ((total == null) ? 0L : total) + acct.getCurrentBalance();
        totals.put(curr, total)
    return totals
def processCis(allObjectChildren, client, ciDictionary, objectMappings, action, updateStatus): iter = allObjectChildren.iterator() while iter.hasNext(): objectElement = iter.next() table = objectElement.getAttributeValue('name') id = objectElement.getAttributeValue('id') mode = objectElement.getAttributeValue('mode') operation = objectElement.getAttributeValue('operation') attributesMap = HashMap() fieldChildren = objectElement.getChildren('field') if fieldChildren is not None: iter2 = fieldChildren.iterator() while iter2.hasNext(): fieldElement = iter2.next() fieldName = fieldElement.getAttributeValue('name') fieldValue = fieldElement.getText() attributesMap.put(fieldName,fieldValue) isKey = fieldElement.getAttributeValue('key') objId = CmdbObjectID.Factory.restoreObjectID(id) newId = objId.getPropertyValue(ID_KEY) externalId = None if (newId is None): #if this is CMDB id cmdbId = objId.getPropertyValue('internal_id') newId = str(client.getNextCounter()) attributesMap.put('ID', newId) propArray = [TypesFactory.createProperty(ID_KEY, attributesMap.get(ID_KEY))] className = objId.getType() externalId = ExternalIdFactory.createExternalCiId(className, propArray) objectMappings.put(cmdbId, externalId) else: logger.info('objId is external and objId.getPropertyValue is ', newId) externalId = ExternalIdUtil.restoreExternalCiId(objId.toString()) attributesMap.put('ID', newId) ciDictionary.put(id, newId) if (action == ADD): client.executeInsert(table, attributesMap, updateStatus, externalId) elif (action == UPDATE): client.executeUpdate(table, attributesMap, newId, updateStatus, externalId) elif (action == DELETE): client.executeDelete(table, attributesMap, newId, updateStatus, externalId)
def getCurrencyAccounts(self, book):
    # print "getCurrencyAccounts "
    import java.util.HashMap as HashMap
    import com.infinitekind.moneydance.model.AccountUtil as AccountUtil
    import com.infinitekind.moneydance.model.AcctFilter as AcctFilter
    import com.infinitekind.moneydance.model.Account.AccountType as AccountType

    accounts = HashMap()
    for acct in AccountUtil.allMatchesForSearch(book.getRootAccount(), AcctFilter.ACTIVE_ACCOUNTS_FILTER):
        curr = acct.getCurrencyType()
        account = accounts.get(curr)  # this returns None if curr doesn't exist yet
        if acct.getCurrentBalance() != 0 and acct.getAccountType() == AccountType.SECURITY:
            pass
        else:
            continue  # no sense slowing everything down with stuff we don't need. only some BONDS left mixed in with the STOCK
        if account == None:
            account = str(acct.getParentAccount())
        else:
            account = account + ' : ' + str(acct.getParentAccount())  # concatenate two strings here
        accounts.put(curr, account)
    return accounts
def DiscoveryMain(Framework): OSHVResult = ObjectStateHolderVector() ms_domain_name = Framework.getDestinationAttribute('ms_domain_name') if not ms_domain_name: ms_domain_name = 'NULL' try: netUtil = MsNetworkUtil() hostsOutput = netUtil.doNetServerEnum('NULL', SV_TYPE_SERVER, ms_domain_name) if hostsOutput != None: discoverUnknownIPs = 1 try: strDiscoverUnknownIPs = Framework.getParameter( 'discoverUnknownIPs') discoverUnknownIPs = Boolean.parseBoolean( strDiscoverUnknownIPs) except: pass oshMsDomain = ObjectStateHolder('msdomain') oshMsDomain.setStringAttribute('data_name', ms_domain_name) alreadyDiscoveredIps = HashMap() for hostInfo in hostsOutput: hostType = Long(hostInfo[1]).longValue() hostName = (str(hostInfo[0])).lower() try: ip = InetAddress.getByName(hostInfo[0]).getHostAddress() if netutils.isLocalIp(ip): continue cachedHostName = alreadyDiscoveredIps.get(ip) if cachedHostName != None: logger.debug( 'IP ', ip, ' already reported for host ' + cachedHostName, ' current host ', hostName, ' - skipping') continue else: logger.debug('Discovered IP ' + ip + ' for host ' + hostName) alreadyDiscoveredIps.put(ip, hostName) ipDomain = DomainScopeManager.getDomainByIp(ip) if not discoverUnknownIPs and ipDomain == 'unknown': logger.debug( 'ip: ' + ip + ' is out of probe range and will be excluded') continue if SV_TYPE_CLUSTER_NT & hostType: logger.debug( 'Not reporting the entry %s because it is a Cluster' % hostName) continue hostOsType = 'nt' if SV_TYPE_SERVER_UNIX & hostType: hostOsType = 'unix' oshHost = modeling.createHostOSH(ip, hostOsType) oshHost.setStringAttribute("host_hostname", hostName) OSHVResult.add(oshHost) link = modeling.createLinkOSH('member', oshMsDomain, oshHost) OSHVResult.add(link) ipOSH = modeling.createIpOSH(ip) OSHVResult.add(ipOSH) contained = modeling.createLinkOSH('contained', oshHost, ipOSH) OSHVResult.add(contained) except: errorMsg = str(sys.exc_info()[1]).strip() logger.warn('Failed to resolve host ', hostInfo[0], ' : ', errorMsg) else: message = 'Failed to discover hosts on MS Domain' logger.warn(message) logger.reportWarning(message) except: errorMsg = str(sys.exc_info()[1]).strip() logger.errorException('Failed to discovery MS Domains') errorMessage = errormessages.makeErrorMessage( "msdomain", errorMsg, errormessages.ERROR_FAILED_DISCOVERING_MSDOMAIN_HOSTS) errobj = errorobject.createError( errorcodes.FAILED_DISCOVERIING_MSDOMAIN_HOST, ["msdomain", errorMsg], errorMessage) logger.reportErrorObject(errobj) return OSHVResult
def getTransactionsInfo(self, transactions): mapTransactionToInfo = HashMap() mapProgramToTransaction = HashMap() if (transactions == None) or (len(transactions) == 0): logger.info("getTransactionsInfo: transactions list is empty") return mapTransactionToInfo transactionsRS = self.__client.executeQuery('TSTC', '', 'TCODE', transactions, 'TCODE,PGMNA,DYPNO')#@@CMD_PERMISION sap protocol execution while transactionsRS.next(): transaction = transactionsRS.getString("TCODE") program = transactionsRS.getString("PGMNA") screen = transactionsRS.getString("DYPNO") if logger.isDebugEnabled(): logger.debug("-------------------------------------------------------") logger.debug("getTransactionsInfo: transaction = " + transaction) logger.debug("getTransactionsInfo: program = " + program) logger.debug("getTransactionsInfo: screen = " + screen) logger.debug("-------------------------------------------------------") if (program == None) or (len(program) == 0): program = "N/A" logger.info("getTransactionsInfo: program for transaction [" + str(transaction) + "] is no available - setting to N/A.") info = TransactionInfo(transaction,program,screen) mapTransactionToInfo.put(transaction,info) transForProgram = mapProgramToTransaction.get(program) if transForProgram == None: transForProgram = ArrayList() mapProgramToTransaction.put(program,transForProgram) transForProgram.add(transaction) if logger.isDebugEnabled(): logger.debug("getTransactionsInfo: mapProgramToTransaction = " + str(mapProgramToTransaction)) if len(mapProgramToTransaction) == 0: logger.info("getTransactionsInfo: failed to get programs for transactions " + str(transactions)) return mapProgramToTransaction objNames = ArrayList(mapProgramToTransaction.keySet()) objNames.addAll(mapTransactionToInfo.keySet()) programsRS = self.__client.executeQuery('TADIR', "(OBJECT = 'PROG' OR OBJECT = 'TRAN') AND ", 'OBJ_NAME', objNames, 'OBJECT,OBJ_NAME,VERSID,DEVCLASS')#@@CMD_PERMISION sap protocol execution while programsRS.next(): objectType = programsRS.getString("OBJECT") if objectType == "PROG": program = programsRS.getString("OBJ_NAME") version = programsRS.getString("VERSID") transForProgram = mapProgramToTransaction.get(program) if transForProgram != None: for ti in transForProgram: info = mapTransactionToInfo.get(ti) if info == None: logger.info("program: Failed to find info for transaction [" + str(transaction) + "]") else: info.version = version else: logger.info("getTransactionsInfo: failed getting transactions for program [" + str(program) + "]") else: # transaction devclass = programsRS.getString("DEVCLASS"); transaction = programsRS.getString("OBJ_NAME") info = mapTransactionToInfo.get(transaction) if info == None: logger.info("transaction: Failed to find info for transaction [" + str(transaction) + "]") info = TransactionInfo(transaction,"N/A","") mapTransactionToInfo.put(transaction,info) info.devclass = devclass if logger.isDebugEnabled(): logger.debug("--------------------------------------------------") logger.debug("getTransactionsInfo: returning transaction info " + str(mapTransactionToInfo)) logger.debug("--------------------------------------------------") return mapTransactionToInfo
class IndexData: def __activate__(self, context): # Prepare variables self.index = context["fields"] self.object = context["object"] self.payload = context["payload"] self.params = context["params"] self.utils = context["pyUtils"] self.config = context["jsonConfig"] self.log = context["log"] self.last_modified = time.strftime("%Y-%m-%dT%H:%M:%SZ", time.gmtime()) self.log.debug("Indexing Metadata Record '{}' '{}'", self.object.getId(), self.payload.getId()) # Common data self.__newDoc() self.packagePid = None pidList = self.object.getPayloadIdList() for pid in pidList: if pid.endswith(".tfpackage"): self.packagePid = pid # Real metadata if self.itemType == "object": self.__basicData() self.__metadata() # Some of the above steps may request some # messages be sent, particularly workflows self.__messages() # Make sure security comes after workflows self.__security() def __newDoc(self): self.oid = self.object.getId() self.pid = self.payload.getId() metadataPid = self.params.getProperty("metaPid", "DC") self.utils.add(self.index, "storage_id", self.oid) if self.pid == metadataPid: self.itemType = "object" else: self.oid += "/" + self.pid self.itemType = "datastream" self.utils.add(self.index, "identifier", self.pid) self.utils.add(self.index, "id", self.oid) self.utils.add(self.index, "item_type", self.itemType) self.utils.add(self.index, "last_modified", self.last_modified) self.utils.add(self.index, "harvest_config", self.params.getProperty("jsonConfigOid")) self.utils.add(self.index, "harvest_rules", self.params.getProperty("rulesOid")) self.item_security = [] self.owner = self.params.getProperty("owner", "guest") def __basicData(self): self.utils.add(self.index, "repository_name", self.params["repository.name"]) self.utils.add(self.index, "repository_type", self.params["repository.type"]) # VITAL integration vitalPid = self.params["vitalPid"] if vitalPid is not None: self.utils.add(self.index, "vitalPid", vitalPid) # Persistent Identifiers pidProperty = self.config.getString(None, ["curation", "pidProperty"]) if pidProperty is None: self.log.error("No configuration found for persistent IDs!") else: pid = self.params[pidProperty] if pid is not None: self.utils.add(self.index, "known_ids", pid) self.utils.add(self.index, "pidProperty", pid) self.utils.add(self.index, "oai_identifier", pid) self.utils.add(self.index, "oai_set", "default") # Publication published = self.params["published"] if published is not None: self.utils.add(self.index, "published", "true") def __security(self): # Security roles = self.utils.getRolesWithAccess(self.oid) if roles is not None: # For every role currently with access for role in roles: # Should show up, but during debugging we got a few if role != "": if role in self.item_security: # They still have access self.utils.add(self.index, "security_filter", role) else: # Their access has been revoked self.__revokeRoleAccess(role) # Now for every role that the new step allows access for role in self.item_security: if role not in roles: # Grant access if new self.__grantRoleAccess(role) self.utils.add(self.index, "security_filter", role) # No existing security else: if self.item_security is None: # Guest access if none provided so far self.__grantRoleAccess("guest") self.utils.add(self.index, "security_filter", role) else: # Otherwise use workflow security for role in self.item_security: # Grant access if new self.__grantRoleAccess(role) self.utils.add(self.index, "security_filter", role) users = self.utils.getUsersWithAccess(self.oid) if users is not None: # For 
every role currently with access for user in users: self.utils.add(self.index, "security_exception", user) # Ownership if self.owner is None: self.utils.add(self.index, "owner", "system") else: self.utils.add(self.index, "owner", self.owner) def __indexList(self, name, values): # convert to set so no duplicate values for value in HashSet(values): self.utils.add(self.index, name, value) def __grantRoleAccess(self, newRole): schema = self.utils.getAccessSchema("derby"); schema.setRecordId(self.oid) schema.set("role", newRole) self.utils.setAccessSchema(schema, "derby") def __grantUserAccess(self, newUser): schema = self.utils.getAccessSchema("derby"); schema.setRecordId(self.oid) schema.set("user", newUser) self.utils.setAccessSchema(schema, "derby") def __revokeRoleAccess(self, oldRole): schema = self.utils.getAccessSchema("derby"); schema.setRecordId(self.oid) schema.set("role", oldRole) self.utils.removeAccessSchema(schema, "derby") def __revokeUserAccess(self, oldUser): schema = self.utils.getAccessSchema("derby"); schema.setRecordId(self.oid) schema.set("user", oldUser) self.utils.removeAccessSchema(schema, "derby") def __metadata(self): self.title = None self.dcType = None self.descriptionList = [] self.creatorList = [] self.creationDate = [] self.contributorList = [] self.approverList = [] self.formatList = ["application/x-fascinator-package"] self.fulltext = [] self.relationDict = {} self.customFields = {} self.creatorFullNameMap = HashMap() self.grantNumberList = [] self.arrayBucket = HashMap() self.compFields = ["dc:coverage.vivo:DateTimeInterval", "locrel:prc.foaf:Person"] self.compFieldsConfig = {"dc:coverage.vivo:DateTimeInterval":{"delim":" to ","start":"start","end":"end"},"locrel:prc.foaf:Person":{"delim":", ","start":"familyName","end":"givenName"} } self.reportingFieldPrefix = "reporting_" self.embargoedDate = None # Try our data sources, order matters self.__workflow() # Some defaults if the above failed if self.title is None: self.title = "New Dataset" if self.formatList == []: source = self.object.getPayload(self.packagePid) self.formatList.append(source.getContentType()) # Index our metadata finally self.utils.add(self.index, "dc_title", self.title) if self.dcType is not None: self.utils.add(self.index, "dc_type", self.dcType) self.__indexList("dc_creator", self.creatorList) #no dc_author in schema.xml, need to check self.__indexList("dc_contributor", self.contributorList) self.__indexList("dc_description", self.descriptionList) self.__indexList("dc_format", self.formatList) self.__indexList("dc_date", self.creationDate) self.__indexList("full_text", self.fulltext) for key in self.customFields: self.__indexList(key, self.customFields[key]) for key in self.relationDict: self.__indexList(key, self.relationDict[key]) if self.arrayBucket.size() > 0: for arrFldName in self.arrayBucket.keySet(): if arrFldName.endswith("Person") or arrFldName.replace(self.reportingFieldPrefix, "") in self.compFields: self.__indexList(arrFldName, self.arrayBucket.get(arrFldName).values()) else: self.__indexList(arrFldName, self.arrayBucket.get(arrFldName)) if self.embargoedDate is not None: self.utils.add(self.index, "date_embargoed", self.embargoedDate+"T00:00:00Z") def __workflow(self): # Workflow data WORKFLOW_ID = "dataset" wfChanged = False workflow_security = [] self.message_list = None stages = self.config.getJsonSimpleList(["stages"]) if self.owner == "guest": pageTitle = "Submission Request" displayType = "submission-request" initialStep = 0 else: pageTitle = "Metadata Record" 
displayType = "package-dataset" initialStep = 1 try: wfMeta = self.__getJsonPayload("workflow.metadata") wfMeta.getJsonObject().put("pageTitle", pageTitle) # Are we indexing because of a workflow progression? targetStep = wfMeta.getString(None, ["targetStep"]) if targetStep is not None and targetStep != wfMeta.getString(None, ["step"]): wfChanged = True # Step change wfMeta.getJsonObject().put("step", targetStep) wfMeta.getJsonObject().remove("targetStep") # This must be a re-index then else: targetStep = wfMeta.getString(None, ["step"]) # Security change for stage in stages: if stage.getString(None, ["name"]) == targetStep: wfMeta.getJsonObject().put("label", stage.getString(None, ["label"])) self.item_security = stage.getStringList(["visibility"]) workflow_security = stage.getStringList(["security"]) if wfChanged == True: self.message_list = stage.getStringList(["message"]) except StorageException: # No workflow payload, time to create initialStage = stages.get(initialStep).getString(None, ["name"]) wfChanged = True wfMeta = JsonSimple() wfMetaObj = wfMeta.getJsonObject() wfMetaObj.put("id", WORKFLOW_ID) wfMetaObj.put("step", initialStage) wfMetaObj.put("pageTitle", pageTitle) stages = self.config.getJsonSimpleList(["stages"]) for stage in stages: if stage.getString(None, ["name"]) == initialStage: wfMetaObj.put("label", stage.getString(None, ["label"])) self.item_security = stage.getStringList(["visibility"]) workflow_security = stage.getStringList(["security"]) self.message_list = stage.getStringList(["message"]) # Has the workflow metadata changed? if wfChanged == True: inStream = IOUtils.toInputStream(wfMeta.toString(True), "UTF-8") try: StorageUtils.createOrUpdatePayload(self.object, "workflow.metadata", inStream) except StorageException: print " ERROR updating dataset payload" # Form processing coreFields = ["title", "description", "manifest", "metaList", "relationships", "responses"] formData = wfMeta.getObject(["formData"]) if formData is not None: formData = JsonSimple(formData) # Core fields description = formData.getStringList(["description"]) if description: self.descriptionList = description # Non-core fields data = formData.getJsonObject() for field in data.keySet(): if field not in coreFields: self.customFields[field] = formData.getStringList([field]) # Manifest processing (formData not present in wfMeta) manifest = self.__getJsonPayload(self.packagePid) formTitles = manifest.getStringList(["title"]) if formTitles: for formTitle in formTitles: if self.title is None: self.title = formTitle self.descriptionList = [manifest.getString("", ["description"])] #Used to make sure we have a created date createdDateFlag = False formData = manifest.getJsonObject() for field in formData.keySet(): if field not in coreFields: value = formData.get(field) if value is not None and value.strip() != "": self.utils.add(self.index, field, value) # We want to sort by date of creation, so it # needs to be indexed as a date (ie. 
'date_*') if field == "dc:created": parsedTime = time.strptime(value, "%Y-%m-%d") solrTime = time.strftime("%Y-%m-%dT%H:%M:%SZ", parsedTime) self.utils.add(self.index, "date_created", solrTime) self.log.debug("Set created date to :%s" % solrTime) createdDateFlag = True elif field == "redbox:embargo.dc:date": self.embargoedDate = value # try to extract some common fields for faceting if field.startswith("dc:") and \ not (field.endswith(".dc:identifier.rdf:PlainLiteral") \ or field.endswith(".dc:identifier") \ or field.endswith(".rdf:resource")): # index dublin core fields for faceting basicField = field.replace("dc:", "dc_") dot = field.find(".") if dot > 0: facetField = basicField[:dot] else: facetField = basicField #print "Indexing DC field '%s':'%s'" % (field, facetField) if facetField == "dc_title": if self.title is None: self.title = value elif facetField == "dc_type": if self.dcType is None: self.dcType = value elif facetField == "dc_creator": if basicField.endswith("foaf_name"): self.utils.add(self.index, "dc_creator", value) else: self.utils.add(self.index, facetField, value) # index keywords for lookup if field.startswith("dc:subject.vivo:keyword."): self.utils.add(self.index, "keywords", value) # check if this is an array field fnameparts = field.split(":") if fnameparts is not None and len(fnameparts) >= 3: if field.startswith("bibo") or field.startswith("skos"): arrParts = fnameparts[1].split(".") else: arrParts = fnameparts[2].split(".") # we're not interested in: Relationship, Type and some redbox:origin if arrParts is not None and len(arrParts) >= 2 and field.find(":Relationship.") == -1 and field.find("dc:type") == -1 and field.find("redbox:origin") == -1 and arrParts[1].isdigit(): # we've got an array field fldPart = ":%s" % arrParts[0] prefixEndIdx = field.find(fldPart) + len(fldPart) suffixStartIdx = prefixEndIdx+len(arrParts[1])+1 arrFldName = self.reportingFieldPrefix + field[:prefixEndIdx] + field[suffixStartIdx:] if field.endswith("Name"): arrFldName = self.reportingFieldPrefix + field[:prefixEndIdx] self.log.debug("Array Field name is:%s from: %s, with value:%s" % (arrFldName, field, value)) if field.endswith("Name"): fullFieldMap = self.arrayBucket.get(arrFldName) if fullFieldMap is None: fullFieldMap = HashMap() self.arrayBucket.put(arrFldName, fullFieldMap) idx = arrParts[1] fullField = fullFieldMap.get(idx) if (fullField is None): fullField = "" if (field.endswith("givenName")): fullField = "%s, %s" % (fullField, value) if (field.endswith("familyName")): fullField = "%s%s" % (value, fullField) self.log.debug("fullname now is :%s" % fullField) fullFieldMap.put(idx, fullField) else: fieldlist = self.arrayBucket.get(arrFldName) if fieldlist is None: fieldlist = [] self.arrayBucket.put(arrFldName, fieldlist) fieldlist.append(value) for compfield in self.compFields: if field.startswith(compfield): arrFldName = self.reportingFieldPrefix +compfield fullFieldMap = self.arrayBucket.get(arrFldName) if fullFieldMap is None: fullFieldMap = HashMap() self.arrayBucket.put(arrFldName, fullFieldMap) fullField = fullFieldMap.get("1") if fullField is None: fullField = "" if field.endswith(self.compFieldsConfig[compfield]["end"]): fullField = "%s%s%s" % (fullField, self.compFieldsConfig[compfield]["delim"] ,value) if field.endswith(self.compFieldsConfig[compfield]["start"]): fullField = "%s%s" % (value, fullField) self.log.debug("full field now is :%s" % fullField) fullFieldMap.put("1", fullField) self.utils.add(self.index, "display_type", displayType) # Make sure we have a 
creation date if not createdDateFlag: self.utils.add(self.index, "date_created", self.last_modified) self.log.debug("Forced creation date to %s because it was not explicitly set." % self.last_modified) # Workflow processing wfStep = wfMeta.getString(None, ["step"]) self.utils.add(self.index, "workflow_id", wfMeta.getString(None, ["id"])) self.utils.add(self.index, "workflow_step", wfStep) self.utils.add(self.index, "workflow_step_label", wfMeta.getString(None, ["label"])) for group in workflow_security: self.utils.add(self.index, "workflow_security", group) if self.owner is not None: self.utils.add(self.index, "workflow_security", self.owner) # set OAI-PMH status to deleted if wfStep == "retired": self.utils.add(self.index, "oai_deleted", "true") def __messages(self): if self.message_list is not None and len(self.message_list) > 0: msg = JsonSimple() msg.getJsonObject().put("oid", self.oid) message = msg.toString() for target in self.message_list: self.utils.sendMessage(target, message) def __getJsonPayload(self, pid): payload = self.object.getPayload(pid) json = self.utils.getJsonObject(payload.open()) payload.close() return json
def transactionChange(sapUtils, mapDevcToOSH, siteOSH, SYSNAME, OSHVResult):
    mapTransportToOSH = HashMap()
    transactionChange = sapUtils.getTransactionChange()
    count = transactionChange.getRowCount()
    for row in range(count):
        # read all fields
        transaction = transactionChange.getCell(row, 0)
        devc = transactionChange.getCell(row, 1)
        objectName = transactionChange.getCell(row, 2)
        objectType = transactionChange.getCell(row, 3)
        objectDescription = transactionChange.getCell(row, 4)
        changeDescription = transactionChange.getCell(row, 5)
        date = transactionChange.getCell(row, 6)
        time = transactionChange.getCell(row, 7)
        user = transactionChange.getCell(row, 8)
        status = transactionChange.getCell(row, 9)
        changeRequest = transactionChange.getCell(row, 10)
        program = transactionChange.getCell(row, 12)
        screen = transactionChange.getCell(row, 13)
        programVersion = transactionChange.getCell(row, 14)
        targetSystem = transactionChange.getCell(row, 15)
        if logger.isDebugEnabled():
            logger.debug('--------------------------------------------')
            logger.debug('changeDescription = ', changeDescription)
            logger.debug('objectType = ', objectType)
            logger.debug('objectName = ', objectName)
            logger.debug('objectDescription = ', objectDescription)
            logger.debug('date = ', date)
            logger.debug('time = ', time)
            logger.debug('user = ', user)
            logger.debug('--------------------------------------------')
        sfDate = SimpleDateFormat('yyyy-MM-dd HH:mm:ss')
        dateObj = sfDate.parse(date + ' ' + time, ParsePosition(0))
        if devc is not None:
            devcOSH = mapDevcToOSH.get(devc)
            if devcOSH is not None:
                # In case the application components were filtered then we need to send
                # only the relevant application components for this transport
                addAppCompHierarchy(devcOSH, OSHVResult)
                transactionOSH = buildTransaction(transaction, devc, program, screen, programVersion, devcOSH, siteOSH, SYSNAME, OSHVResult)
                ticketStatus = ''
                # L - Modifiable
                # D - Modifiable, Protected
                if status == 'L' or status == 'D':
                    # (1) = Plan
                    # (9) = Critical
                    ticketStatus = 'In progress'
                else:
                    # (2) = New change
                    # (7) = Major
                    ticketStatus = 'Closed'
                transportOSH = mapTransportToOSH.get(changeRequest)
                if transportOSH is None:
                    transportOSH = buildTransport(changeRequest, dateObj, time, user, targetSystem, changeDescription, ticketStatus, siteOSH, OSHVResult)
                    mapTransportToOSH.put(changeRequest, transportOSH)
                OSHVResult.add(modeling.createLinkOSH('contains', transactionOSH, transportOSH))
                changeOSH = createTransportChange(transaction, objectType, objectName, transportOSH, OSHVResult)
                OSHVResult.add(modeling.createLinkOSH('use', changeOSH, transactionOSH))
            else:
                logger.warn('can not find devclass OSH for [', devc, ']')
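# A minimal sketch of the status mapping used above, pulled out as a standalone helper.
# It assumes the same single-letter status codes returned by getTransactionChange();
# the helper name ticketStatusForSapStatus is hypothetical.
def ticketStatusForSapStatus(status):
    # 'L' (Modifiable) and 'D' (Modifiable, Protected) mean the transport is still open
    if status in ('L', 'D'):
        return 'In progress'
    # every other status code is treated as a finished transport
    return 'Closed'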
class Linker(object): """ generated source for class Linker """ CONSTANT = re.compile("[A-Z_][A-Z0-9_]*") # Map of file-path to semantic styles & links for that path. # # * Constructor. # * # * @param root the root of the directory tree being indexed # * @param outdir the html output directory # def __init__(self, root=None, outdir=None): """ generated source for method __init__ """ self.rootPath = root self.outDir = outdir self.fileStyles = HashMap() self.outDir = None #File() self.rootPath = str() self.seenDef = HashSet() self.seenRef = HashSet() def findLinks(self, analyzer): """ generated source for method findLinks """ _.msg("Adding xref links") progress = FancyProgress(len(analyzer.getAllBindings()), 50) for b in analyzer.getAllBindings(): self.addSemanticStyles(b) self.processDef(b) progress.tick() # highlight definitions _.msg("\nAdding ref links") progress = FancyProgress(len(analyzer.getReferences()), 50) for e in analyzer.getReferences().items(): self.processRef(e[0], e[1]) progress.tick() # for (List<Diagnostic> ld: analyzer.semanticErrors.values()) { # for (Diagnostic d: ld) { # processDiagnostic(d); # } # } # for (List<Diagnostic> ld: analyzer.parseErrors.values()) { # for (Diagnostic d: ld) { # processDiagnostic(d); # } # } def processDef(self, binding): """ generated source for method processDef """ _hash = binding.hashCode() if binding.isURL() or binding.getStart() < 0 or _hash in self.seenDef: return self.seenDef.add(_hash) style = StyleRun(StyleRun.Type.ANCHOR, binding.getStart(), binding.getLength()) style.message = binding.getType().__str__() style.url = binding.getQname() style.id = str(abs(binding.hashCode())) refs = binding.getRefs() style.highlight = list() for r in refs: style.highlight.append(str(abs(r.hashCode()))) self.addFileStyle(binding.getFile(), style) def processRef(self, ref, bindings): """ generated source for method processRef """ _hash = ref.hashCode() if not _hash in self.seenRef: self.seenRef.add(_hash) link = StyleRun(StyleRun.Type.LINK, ref.start(), ref.length()); link.id = str(abs(_hash)) typings = list() for b in bindings: typings.append(b.getType().__str__()) if len(typings): if len(typings) > 1: link.message = _.joinWithSep(typings, " | ", "{", "}") else: link.message = typings[0] else: link.message = '' link.highlight = list() for b in bindings: link.highlight.append(str(abs(b.hashCode()))) # Currently jump to the first binding only. Should change to have a # hover menu or something later. path = ref.getFile(); for b in bindings: if link.url is None: link.url = self.toURL(b, path) if link.url is not None: self.addFileStyle(path, link) break # # * Returns the styles (links and extra styles) generated for a given file. # * # * @param path an absolute source path # * @return a possibly-empty list of styles for that path # def getStyles(self, path): return self.stylesForFile(path) def stylesForFile(self, path): styles = self.fileStyles.get(path) if styles is None: styles = list() self.fileStyles[path] = styles return styles def addFileStyle(self, path, style): self.stylesForFile(path).append(style) # # * Add additional highlighting styles based on information not evident from # * the AST. 
# def addSemanticStyles(self, nb): isConst = self.CONSTANT.match(nb.__class__.__name__) is not None if nb.getKind()==SCOPE: if isConst: self.addSemanticStyle(nb, StyleRun.Type.CONSTANT) elif nb.getKind()==VARIABLE: self.addSemanticStyle(nb, StyleRun.Type.CONSTANT if isConst else StyleRun.Type.IDENTIFIER) elif nb.getKind()==PARAMETER: self.addSemanticStyle(nb, StyleRun.Type.PARAMETER) elif nb.getKind()==CLASS: self.addSemanticStyle(nb, StyleRun.Type.TYPE_NAME) def addSemanticStyle(self, binding, type_): path = binding.getFile() if binding.getStart() is None or binding.getEnd() is None: print('invalid binding: ' + str(binding)) return if path is not None: self.addFileStyle(path, StyleRun(type_, binding.getStart(), binding.getLength())) def processDiagnostic(self, d): style = StyleRun(StyleRun.Type.WARNING, d.start, d.end - d.start) style.message = d.msg style.url = d.file_ self.addFileStyle(d.file_, style) def toURL(self, binding, filename): if binding.isBuiltin(): return binding.getURL() destPath = str() if binding.getType().isModuleType(): destPath = binding.getType().asModuleType().getFile() else: destPath = binding.getFile() if destPath is None: return None anchor = "#" + binding.getQname() if binding.getFirstFile() == filename: return anchor if destPath.startswith(self.rootPath): if filename is not None: relpath = _.relPath(filename, destPath) else: relpath = destPath if relpath is not None: return relpath + ".html" + anchor else: return anchor else: return "file://" + destPath + anchor
def infrastructure(client, OSHVResult, Framework, schemaName=None, viewSchemaName=None): retOSHs = ArrayList(4) retOSHs.add(None) retOSHs.add(None) retOSHs.add(None) retOSHs.add(None) systemOSH = ObjectStateHolder('oraclesystem') systemOSH.setAttribute('data_name', client.getSid()) systemOSH.setAttribute('oraclesystem_dbaddress', client.getIpAddress()) modeling.setAppSystemVendor(systemOSH) webServerOSH = None nameToHostOSH = HashMap() hostToServerOSH = HashMap() hostToIpAddress = HashMap() databasesOSH = HashMap() resultSet = None try: objs = __assocWithSchemaName(['FND_OAM_APP_SYS_STATUS'], schemaName, viewSchemaName) # query a special table that holds Applications System Status related information resultSet = client.executeQuery( 'SELECT * FROM %s' % objs) #@@CMD_PERMISION sql protocol execution except: logger.debugException( 'SQL query failure. "SELECT * FROM FND_OAM_APP_SYS_STATUS"') Framework.reportWarning('No Oracle E-Business Suite components found.') if resultSet: OSHVResult.add(systemOSH) retOSHs.set(0, systemOSH) else: return None while resultSet.next(): name = resultSet.getString(1) dbSid = resultSet.getString(4) status = resultSet.getString(6) host = resultSet.getString(7) port = client.getPort() if logger.isDebugEnabled(): logger.debug('-----------------------------') logger.debug('name = ', name) logger.debug('status = ', status) if host != None: logger.debug('host = ', host) else: logger.debug('skipping Application system with None host') continue logger.debug('-----------------------------') hostOSH = nameToHostOSH.get(host) serverOSH = hostToServerOSH.get(host) hostIP = hostToIpAddress.get(host) if not hostIP: hostIP = netutils.getHostAddress(host, host) if hostOSH == None and netutils.isValidIp(hostIP): hostOSH = modeling.createHostOSH(hostIP) OSHVResult.add(hostOSH) nameToHostOSH.put(host, hostOSH) hostToIpAddress.put(host, hostIP) if hostOSH == None: logger.warn('Failed to created host [', host, ']') continue if serverOSH == None: serverOSH = modeling.createJ2EEServer('oracleias', hostIP, None, hostOSH, host) OSHVResult.add(serverOSH) hostToServerOSH.put(host, serverOSH) serverMemberOSH = modeling.createLinkOSH('member', systemOSH, serverOSH) OSHVResult.add(serverMemberOSH) if name.find('WEB_SERVER') == 0 and host != None: webServerOSH = serverOSH serverOSH.setBoolAttribute('oracleias_web', 1) elif name.find('FORMS_SERVER') == 0 and host != None: serverOSH.setBoolAttribute('oracleias_form', 1) elif name.find('ADMIN_SERVER') == 0 and host != None: serverOSH.setBoolAttribute('oracleias_admin', 1) elif name.find('CP_SERVER') == 0 and host != None: serverOSH.setBoolAttribute('oracleias_concurrentprocessing', 1) elif name.find('DATABASE') == 0 and host != None: dbOSH = modeling.createDatabaseOSH('oracle', dbSid, port, hostIP, hostOSH) OSHVResult.add(dbOSH) databasesOSH.put(dbSid, dbOSH) memberOSH = modeling.createLinkOSH('member', systemOSH, dbOSH) OSHVResult.add(memberOSH) resultSet.close() try: systemMetrics(client, systemOSH, webServerOSH, OSHVResult, schemaName, viewSchemaName) except: logger.debug("Failed to get system metrics") retOSHs.set(1, hostToServerOSH) retOSHs.set(2, nameToHostOSH) retOSHs.set(3, databasesOSH) return retOSHs
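# Hypothetical caller sketch: infrastructure() hands its results back positionally in a
# four-slot ArrayList (or returns None when the status query produced nothing), so a
# wrapper would typically unpack it as below. The wrapper name and follow-on steps are assumptions.
def discoverOracleEBS(client, OSHVResult, Framework):
    retOSHs = infrastructure(client, OSHVResult, Framework)
    if retOSHs is None:
        return
    systemOSH = retOSHs.get(0)        # the oraclesystem root CI
    hostToServerOSH = retOSHs.get(1)  # host name -> oracleias server OSH
    nameToHostOSH = retOSHs.get(2)    # host name -> host OSH
    databasesOSH = retOSHs.get(3)     # SID -> oracle database OSH
    # later discovery phases would reuse these maps to attach further CIs to the servers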
class PropNetConverter(object): """ generated source for class PropNetConverter """ # An archive of Propositions, indexed by name. propositions = Map() # An archive of Components. components = Set() # # * Converts a game description to a PropNet using the following process # * (note that this method and all of the methods that it invokes assume that # * <tt>description</tt> has already been flattened by a PropNetFlattener): # * <ol> # * <li>Transforms each of the rules in <tt>description</tt> into # * equivalent PropNet Components.</li> # * <li>Adds or gates to Propositions with more than one input.</li> # * <li>Adds inputs that are implicitly specified by <tt>description</tt>.</li> # * </ol> # * # * @param description # * A game description. # * @return An equivalent PropNet. # def convert(self, roles, description): """ generated source for method convert """ self.propositions = HashMap() self.components = HashSet() for rule in description: if rule.arity() > 0: convertRule(rule) else: convertStatic(rule.getHead()) fixDisjunctions() addMissingInputs() return PropNet(roles, self.components) # # * Creates an equivalent InputProposition for every LegalProposition where # * none already exists. # def addMissingInputs(self): """ generated source for method addMissingInputs """ addList = ArrayList() for proposition in propositions.values(): if isinstance(, (GdlRelation, )): if relation.__name__.getValue() == "legal": addList.add(proposition) for addItem in addList: self.components.add(getProposition(GdlPool.getRelation(GdlPool.getConstant("does"), relation.getBody()))) def convertConjunct(self, literal): """ generated source for method convertConjunct """ if isinstance(literal, (GdlDistinct, )): link(constant, proposition) self.components.add(proposition) self.components.add(constant) return proposition elif isinstance(literal, (GdlNot, )): link(input, no) link(no, output) self.components.add(input) self.components.add(no) self.components.add(output) return output else: self.components.add(proposition) return proposition def convertHead(self, sentence): """ generated source for method convertHead """ if sentence.__name__.getValue() == "next": link(preTransition, transition) link(transition, head) self.components.add(head) self.components.add(transition) self.components.add(preTransition) return preTransition else: self.components.add(proposition) return proposition def convertRule(self, rule): """ generated source for method convertRule """ head = self.convertHead(rule.getHead()) and_ = And() link(and_, head) self.components.add(head) self.components.add(and_) for literal in rule.getBody(): link(conjunct, and_) def convertStatic(self, sentence): """ generated source for method convertStatic """ if sentence.__name__.getValue() == "init": link(init, transition) link(transition, proposition) self.components.add(init) self.components.add(transition) self.components.add(proposition) constant = Constant(True) proposition = getProposition(sentence) link(constant, proposition) self.components.add(constant) self.components.add(proposition) def fixDisjunctions(self): """ generated source for method fixDisjunctions """ fixList = ArrayList() for proposition in propositions.values(): if proposition.getInputs().size() > 1: fixList.add(proposition) for fixItem in fixList: for input in fixItem.getInputs(): i += 1 if isinstance(, (GdlProposition, )): disjunct = Proposition(GdlPool.getProposition(GdlPool.getConstant(proposition.__name__.getValue() + "-" + i))) else: disjunct = 
Proposition(GdlPool.getRelation(GdlPool.getConstant(relation.__name__.getValue() + "-" + i), relation.getBody())) input.getOutputs().clear() link(input, disjunct) link(disjunct, or_) self.components.add(disjunct) fixItem.getInputs().clear() link(or_, fixItem) self.components.add(or_) def getProposition(self, sentence): """ generated source for method getProposition """ if not self.propositions.containsKey(sentence): self.propositions.put(sentence, Proposition(sentence)) return self.propositions.get(sentence) def link(self, source, target): """ generated source for method link """ source.addOutput(target) target.addInput(source)
class GameSelector(ActionListener): """ generated source for class GameSelector """ theGameList = JComboBox() theRepositoryList = JComboBox() theSelectedRepository = GameRepository() theCachedRepositories = Map() class NamedItem(object): """ generated source for class NamedItem """ theKey = str() theName = str() def __init__(self, theKey, theName): """ generated source for method __init__ """ self.theKey = theKey self.theName = theName def __str__(self): """ generated source for method toString """ return self.theName def __init__(self): """ generated source for method __init__ """ super(GameSelector, self).__init__() self.theGameList = JComboBox() self.theGameList.addActionListener(self) self.theRepositoryList = JComboBox() self.theRepositoryList.addActionListener(self) self.theCachedRepositories = HashMap() self.theRepositoryList.addItem("games.ggp.org/base") self.theRepositoryList.addItem("games.ggp.org/dresden") self.theRepositoryList.addItem("games.ggp.org/stanford") self.theRepositoryList.addItem("Local Game Repository") def actionPerformed(self, e): """ generated source for method actionPerformed """ if e.getSource() == self.theRepositoryList: if self.theCachedRepositories.containsKey(theRepositoryName): self.theSelectedRepository = self.theCachedRepositories.get(theRepositoryName) else: if theRepositoryName == "Local Game Repository": self.theSelectedRepository = LocalGameRepository() else: self.theSelectedRepository = CloudGameRepository(theRepositoryName) self.theCachedRepositories.put(theRepositoryName, self.theSelectedRepository) repopulateGameList() def getSelectedGameRepository(self): """ generated source for method getSelectedGameRepository """ return self.theSelectedRepository def repopulateGameList(self): """ generated source for method repopulateGameList """ theRepository = self.getSelectedGameRepository() theKeyList = ArrayList(theRepository.getGameKeys()) Collections.sort(theKeyList) self.theGameList.removeAllItems() for theKey in theKeyList: if theGame == None: continue if theName == None: theName = theKey if 24 > len(theName): theName = theName.substring(0, 24) + "..." self.theGameList.addItem(self.NamedItem(theKey, theName)) def getRepositoryList(self): """ generated source for method getRepositoryList """ return self.theRepositoryList def getGameList(self): """ generated source for method getGameList """ return self.theGameList def getSelectedGame(self): """ generated source for method getSelectedGame """ try: return self.getSelectedGameRepository().getGame((self.theGameList.getSelectedItem()).theKey) except Exception as e: return None
def getActiveTransactions(self): activeTransactions = [] whereClauses = ArrayList() whereClauses.add("FUNCNAME IN ('SAPWL_TCODE_AGGREGATION','SAPWL_TCODE_AGGREGATION_COPY')"); result = self.executeQuery("TFDIR", whereClauses, "FUNCNAME")#@@CMD_PERMISION sap protocol execution functionName = None if result.next(): functionName = result.getString("FUNCNAME") if functionName == None: logger.warn('getActiveTransactions: active transaction function is not found') return activeTransactions day = self.__client.getProperty('from_date') if day == None: today = Date() sfDate = SimpleDateFormat("yyyyMMdd") day = sfDate.format(today) elif day.find('/') != -1: try: sfDate = SimpleDateFormat("MM/dd/yyyy") parsedDate = sfDate.parse(day) sfDate = SimpleDateFormat("yyyyMMdd") day = sfDate.format(parsedDate) except: logger.reportWarning('Failed to parse date ', day) logger.debug('Parsed start date:', day) logger.debug('Active transactions from data:', day) mapTransactionToUsers = None getUsers = Boolean.parseBoolean(self.__client.getProperty("get_users")) if getUsers: mapTransactionToUsers = HashMap() funcParams = HashMap() funcParams.put('READ_START_DATE', day) funcParams.put('READ_START_TIME', '000000') funcParams.put('READ_END_DATE', day) funcParams.put('READ_END_TIME', '235959') funcParams.put('READ_ONLY_MAINRECORDS', 'X') logger.debug('executing func:SAPWL_STATREC_FROM_REMOTE_SYS(', str(funcParams),')') fields = ArrayList() fields.add('TCODE') fields.add('ACCOUNT') usersResult = self.__client.executeFunction('SAPWL_STATREC_FROM_REMOTE_SYS', funcParams, 'NORMAL_RECORDS', fields) while usersResult.next(): transaction = usersResult.getString('TCODE') if len(transaction) > 0: user = usersResult.getString("ACCOUNT"); users = mapTransactionToUsers.get(transaction) if users == None: users = HashMap() mapTransactionToUsers.put(transaction,users) users.put(user,users); self.getSites() site = self.getSites().getCell(0,0) servers = self.getServers(site) numServers = servers.getRowCount() transactionToStats = HashMap() for j in range(numServers): try: instance = servers.getCell(j,0); logger.debug('getActiveTransactions:executing function[' + functionName + '] for instance [' + instance + ']') if functionName == 'SAPWL_TCODE_AGGREGATION_COPY': records = self.callSapwlTcodeAggregationCopy(instance,day) while records.next(): transaction = (str(records.getString(0))).strip() mapUsers = None if mapTransactionToUsers != None: mapUsers = mapTransactionToUsers.get(transaction) if (transaction != None) and (len(transaction) > 0): stats = transactionToStats.get(transaction) if stats == None: stats = TransactionStatistics(transaction) transactionToStats.put(transaction,stats) if mapUsers != None: stats.users = ArrayList(mapUsers.keySet()) if records.next(): stats.steps = stats.steps + int(float(records.getString(0))) if records.next(): stats.responseTime = stats.responseTime + int(float(records.getString(0))) if records.next(): stats.cpuTime = stats.cpuTime + int(float(records.getString(0))) if records.next(): stats.dbTime = stats.dbTime + int(float(records.getString(0))) if records.next(): stats.guiTime = stats.guiTime + int(float(records.getString(0))) if records.next(): stats.roundTrips = stats.roundTrips + int(float(records.getString(0))) if records.next(): stats.text = (str(records.getString(0))).strip() else: fields = ArrayList() fields.add('ENTRY_ID') fields.add('COUNT') fields.add('RESPTI') fields.add('CPUTI') fields.add('DBTIME') fields.add('GUITIME') fields.add('GUICNT') fields.add('TEXT') records = 
self.getApplicationStatistics(functionName, instance, day, fields) while records.next(): entryID = records.getString("ENTRY_ID"); transaction = self.getTransactionFromEntryID(entryID); mapUsers = None if mapTransactionToUsers != None: mapUsers = mapTransactionToUsers.get(transaction) if (transaction != None) and (len(transaction) > 0): stats = transactionToStats.get(transaction) if(stats == None): stats = TransactionStatistics(transaction) transactionToStats.put(transaction,stats) if(mapUsers != None): stats.users = ArrayList(mapUsers.keySet()) count = records.getString("COUNT") stats.steps = stats.steps + int(count) stats.responseTime = stats.responseTime + int(records.getString("RESPTI")) stats.cpuTime = stats.cpuTime + int(records.getString("CPUTI")) stats.dbTime = stats.dbTime + int(records.getString("DBTIME")) stats.guiTime = stats.guiTime + int(records.getString("GUITIME")) stats.roundTrips = stats.roundTrips + int(records.getString("GUICNT")) stats.text = records.getString("TEXT") except: msg = sys.exc_info()[1] strmsg = '%s' % msg if strmsg.find('NO_DATA_FOUND') != -1: logger.debug(strmsg) logger.reportWarning('No data found in the given time range') else: logger.debugException('Unexpected error getting transactions for function:' + str(functionName)) logger.reportWarning('Unexpected error getting transactions for function:' + str(functionName) + ':' + strmsg) transactions = ArrayList(transactionToStats.keySet()) logger.debug("getActiveTransactions: Found [" + str(transactions.size()) + "] active transactions") if logger.isDebugEnabled(): logger.debug("getActiveTransactions: transactions = " + str(transactions)) transactionsInfo = self.getTransactionsInfo(transactions) it = transactionToStats.values() for stats in it: prop = Properties() prop.setProperty('data_name', str(stats.transaction)) prop.setProperty('dialog_steps', str(stats.steps)) prop.setProperty('total_response_time', str(stats.responseTime)) prop.setProperty('average_response_time', str(stats.getAverageCPUTime())) prop.setProperty('total_cpu_time', str(stats.cpuTime)) prop.setProperty('average_cpu_time', str(stats.getAverageCPUTime())) prop.setProperty('round_trips', str(stats.roundTrips)) prop.setProperty('total_db_time', str(stats.dbTime)) prop.setProperty('average_db_time', str(stats.getAverageDBTime())) prop.setProperty('total_gui_time', str(stats.guiTime)) prop.setProperty('average_gui_time', str(stats.getAverageGUITime())) prop.setProperty('text', stats.text) prop.setProperty('saptransaction_averagedbtime', str(stats.users.size())) info = transactionsInfo.get(stats.transaction) if info != None: prop.setProperty('devclass', info.devclass) prop.setProperty('program', info.program) prop.setProperty('screen', info.screen) prop.setProperty('', info.screen) else: prop.setProperty('devclass', "") prop.setProperty('program', "") prop.setProperty('screen', "") prop.setProperty('version', "") activeTransactions.append(prop) return activeTransactions
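# Minimal sketch of the 'from_date' normalisation performed at the top of
# getActiveTransactions(): no value -> today, 'MM/dd/yyyy' -> 'yyyyMMdd'.
# The helper name normalizeFromDate is hypothetical; SimpleDateFormat and Date are
# the same Java classes the function already uses, and parse errors are not handled here.
from java.text import SimpleDateFormat
from java.util import Date

def normalizeFromDate(day):
    if day is None:
        return SimpleDateFormat('yyyyMMdd').format(Date())
    if day.find('/') != -1:
        parsed = SimpleDateFormat('MM/dd/yyyy').parse(day)
        return SimpleDateFormat('yyyyMMdd').format(parsed)
    return day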
import random
import time
from UserList import UserList
from java.util import HashMap


class PagingWindow(UserList):
    def __init__(self, **kwargs):
        # The list itself is contained within self.data (from UserList).
        UserList.__init__(self)
        self.limitSize = 0      # 0 means unbounded
        self.limitMin = False   # unbounded
        self.limitMax = False   # unbounded
        self.minElmIndx = -1
        self.minElmVal = None
        self.maxElmIndx = -1
        self.maxElmVal = None
        # Hash map for storing an optional payload object per key.
        self.hash = HashMap()
        opts = {
            'limitSize': self.optLimitSize,
            'limitMin': self.optLimitMin,
            'limitMax': self.optLimitMax,
            'preserveSmallest': self.optPreserveSmallest,
            'preserveLargest': self.optPreserveLargest,
        }
        # Process each optional argument.
        for k in kwargs.keys():
            optFunc = opts.get(k)
            if optFunc is None:
                raise LookupError("Option [%s] is not supported by the PagingWindow class." % k)
            optFunc(kwargs[k])
        random.seed(time.time())

    def optLimitSize(self, _size):
        if type(_size).__name__ != "int":
            raise ValueError("limitSize parameter must be type int. Got type [%s]." % type(_size).__name__)
        self.limitSize = _size

    def optLimitMin(self, _min):
        if type(_min).__name__ != "bool":
            raise ValueError("limitMin parameter must be type bool.")
        self.limitMin = _min

    def optLimitMax(self, _max):
        if type(_max).__name__ != "bool":
            raise ValueError("limitMax parameter must be type bool.")
        self.limitMax = _max

    def optPreserveSmallest(self, _small):
        if type(_small).__name__ != "bool":
            raise ValueError("preserveSmallest parameter must be type bool.")
        if _small:
            self.limitMin = False
            self.limitMax = True

    def optPreserveLargest(self, _large):
        if type(_large).__name__ != "bool":
            raise ValueError("preserveLargest parameter must be type bool.")
        if _large:
            self.limitMin = True
            self.limitMax = False

    def add(self, _key, _value=None):
        dataLen = self.data.__len__()
        if dataLen < self.limitSize:
            # The window is not full yet, so simply insert the new key.
            self.data.append(_key)
            if _value is not None:
                self.hash.put(_key, _value)
            # We should remove the sort on every insert.
            # Use sortedcontainers instead.
            self.data.sort()
        else:
            insertMinOk = True
            insertMaxOk = True
            if self.limitMin:
                # If the new value is greater than the current min element, we may need
                # to remove the current min element to make room for the new value.
                if self.data.__len__() > 0:
                    # The minElmIndx is always 0, unless the array has no data.
                    self.minElmIndx = 0
                else:
                    self.minElmIndx = -1
                if self.minElmIndx >= 0:
                    self.minElmVal = self.data[self.minElmIndx]
                    if _key < self.minElmVal:
                        insertMinOk = False
            if self.limitMax:
                # If the new value is smaller than the current max element, we may need
                # to remove the current max element to make room for the new value.
                self.maxElmIndx = self.data.__len__() - 1
                if self.maxElmIndx > 0:
                    self.maxElmVal = self.data[self.maxElmIndx]
                    if _key > self.maxElmVal:
                        insertMaxOk = False
            if self.limitMin and self.limitMax:
                # Handle the case where it is ok to insert on either side.
                if insertMinOk and insertMaxOk:
                    # chooseSide() may be replaced by a custom function that gets passed in.
                    side = self.chooseSide(_key)
                    if side == 0:
                        raise AssertionError("chooseSide() should not return 0 as a result")
                    if side < 0:
                        # Evict the current minimum (key and any stored payload).
                        if self.minElmVal is not None:
                            self.data.remove(self.minElmVal)
                            if self.hash.containsKey(self.minElmVal):
                                self.hash.remove(self.minElmVal)
                    if side > 0:
                        # Evict the current maximum (key and any stored payload).
                        if self.maxElmVal is not None:
                            self.data.remove(self.maxElmVal)
                            if self.hash.containsKey(self.maxElmVal):
                                self.hash.remove(self.maxElmVal)
            else:
                if self.limitMin:
                    if insertMinOk:
                        # Evict the current minimum (key and any stored payload).
                        if self.minElmVal is not None:
                            self.data.remove(self.minElmVal)
                            if self.hash.containsKey(self.minElmVal):
                                self.hash.remove(self.minElmVal)
                    else:
                        if self.data.__len__() + 1 > self.limitSize:
                            return False
                if self.limitMax:
                    if insertMaxOk:
                        # Evict the current maximum (key and any stored payload).
                        if self.maxElmVal is not None:
                            self.data.remove(self.maxElmVal)
                            if self.hash.containsKey(self.maxElmVal):
                                self.hash.remove(self.maxElmVal)
                    else:
                        if self.data.__len__() + 1 > self.limitSize:
                            return False
            self.data.append(_key)
            if _value is not None:
                self.hash.put(_key, _value)
            # We should remove the sort on every insert.
            # Possibly use sortedcontainers instead.
            self.data.sort()
        # Return True when a value is added.
        return True

    def pop(self, indx):
        # Pop the item at indx from the list.
        _key = self.data.pop(indx)
        # By default, return the key.
        retVal = _key
        # But, if the key has a corresponding value in the hash...
        if self.hash.containsKey(_key):
            # ...return the key and value together...
            retVal = [_key, self.hash.get(_key)]
            # ...and remove the object from the hash.
            self.hash.remove(_key)
        return retVal

    def chooseSide(self, _key):
        # Pick a side at random; never returns 0.
        r = random.getrandbits(1)
        if r == 0:
            return -1
        return 1

    def size(self):
        return self.data.__len__()
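# Short usage sketch for PagingWindow, assuming a Jython runtime (java.util.HashMap is
# used internally); the keys and payloads below are made up.
window = PagingWindow(limitSize=3, preserveLargest=True)
for key, payload in [(10, 'a'), (42, 'b'), (7, 'c'), (99, 'd')]:
    window.add(key, payload)
print window.size()   # 3 - the smallest key (7) was evicted to make room for 99
print window.pop(0)   # [10, 'a'] - smallest remaining key plus its stored payload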
def __workflow(self): # Workflow data WORKFLOW_ID = "dataset" wfChanged = False workflow_security = [] self.message_list = None stages = self.config.getJsonSimpleList(["stages"]) if self.owner == "guest": pageTitle = "Submission Request" displayType = "submission-request" initialStep = 0 else: pageTitle = "Metadata Record" displayType = "package-dataset" initialStep = 1 try: wfMeta = self.__getJsonPayload("workflow.metadata") wfMeta.getJsonObject().put("pageTitle", pageTitle) # Are we indexing because of a workflow progression? targetStep = wfMeta.getString(None, ["targetStep"]) if targetStep is not None and targetStep != wfMeta.getString( None, ["step"]): wfChanged = True # Step change wfMeta.getJsonObject().put("step", targetStep) wfMeta.getJsonObject().remove("targetStep") # This must be a re-index then else: targetStep = wfMeta.getString(None, ["step"]) # Security change for stage in stages: if stage.getString(None, ["name"]) == targetStep: wfMeta.getJsonObject().put( "label", stage.getString(None, ["label"])) self.item_security = stage.getStringList(["visibility"]) workflow_security = stage.getStringList(["security"]) if wfChanged == True: self.message_list = stage.getStringList(["message"]) except StorageException: # No workflow payload, time to create initialStage = stages.get(initialStep).getString(None, ["name"]) wfChanged = True wfMeta = JsonSimple() wfMetaObj = wfMeta.getJsonObject() wfMetaObj.put("id", WORKFLOW_ID) wfMetaObj.put("step", initialStage) wfMetaObj.put("pageTitle", pageTitle) stages = self.config.getJsonSimpleList(["stages"]) for stage in stages: if stage.getString(None, ["name"]) == initialStage: wfMetaObj.put("label", stage.getString(None, ["label"])) self.item_security = stage.getStringList(["visibility"]) workflow_security = stage.getStringList(["security"]) self.message_list = stage.getStringList(["message"]) # Has the workflow metadata changed? if wfChanged == True: inStream = IOUtils.toInputStream(wfMeta.toString(True), "UTF-8") try: StorageUtils.createOrUpdatePayload(self.object, "workflow.metadata", inStream) except StorageException: print " ERROR updating dataset payload" # Form processing coreFields = [ "title", "description", "manifest", "metaList", "relationships", "responses" ] formData = wfMeta.getObject(["formData"]) if formData is not None: formData = JsonSimple(formData) # Core fields description = formData.getStringList(["description"]) if description: self.descriptionList = description # Non-core fields data = formData.getJsonObject() for field in data.keySet(): if field not in coreFields: self.customFields[field] = formData.getStringList([field]) # Manifest processing (formData not present in wfMeta) manifest = self.__getJsonPayload(self.packagePid) formTitles = manifest.getStringList(["title"]) if formTitles: for formTitle in formTitles: if self.title is None: self.title = formTitle self.descriptionList = [manifest.getString("", ["description"])] #Used to make sure we have a created date createdDateFlag = False formData = manifest.getJsonObject() for field in formData.keySet(): if field not in coreFields: value = formData.get(field) if value is not None and value.strip() != "": self.utils.add(self.index, field, value) # We want to sort by date of creation, so it # needs to be indexed as a date (ie. 
'date_*') if field == "dc:created": parsedTime = time.strptime(value, "%Y-%m-%d") solrTime = time.strftime("%Y-%m-%dT%H:%M:%SZ", parsedTime) self.utils.add(self.index, "date_created", solrTime) self.log.debug("Set created date to :%s" % solrTime) createdDateFlag = True elif field == "redbox:embargo.dc:date": self.embargoedDate = value elif field == "create_timestamp": self.createTimeStamp = value # try to extract some common fields for faceting if field.startswith("dc:") and \ not (field.endswith(".dc:identifier.rdf:PlainLiteral") \ or field.endswith(".dc:identifier") \ or field.endswith(".rdf:resource")): # index dublin core fields for faceting basicField = field.replace("dc:", "dc_") dot = field.find(".") if dot > 0: facetField = basicField[:dot] else: facetField = basicField #print "Indexing DC field '%s':'%s'" % (field, facetField) if facetField == "dc_title": if self.title is None: self.title = value elif facetField == "dc_type": if self.dcType is None: self.dcType = value elif facetField == "dc_creator": if basicField.endswith("foaf_name"): self.utils.add(self.index, "dc_creator", value) else: self.utils.add(self.index, facetField, value) # index keywords for lookup if field.startswith("dc:subject.vivo:keyword."): self.utils.add(self.index, "keywords", value) # check if this is an array field fnameparts = field.split(":") if fnameparts is not None and len(fnameparts) >= 3: if field.startswith("bibo") or field.startswith( "skos"): arrParts = fnameparts[1].split(".") else: arrParts = fnameparts[2].split(".") # we're not interested in: Relationship, Type and some redbox:origin if arrParts is not None and len( arrParts) >= 2 and field.find( ":Relationship.") == -1 and field.find( "dc:type") == -1 and field.find( "redbox:origin" ) == -1 and arrParts[1].isdigit(): # we've got an array field fldPart = ":%s" % arrParts[0] prefixEndIdx = field.find(fldPart) + len(fldPart) suffixStartIdx = prefixEndIdx + len( arrParts[1]) + 1 arrFldName = self.reportingFieldPrefix + field[:prefixEndIdx] + field[ suffixStartIdx:] if field.endswith("Name"): arrFldName = self.reportingFieldPrefix + field[: prefixEndIdx] self.log.debug( "Array Field name is:%s from: %s, with value:%s" % (arrFldName, field, value)) if field.endswith("Name"): fullFieldMap = self.arrayBucket.get(arrFldName) if fullFieldMap is None: fullFieldMap = HashMap() self.arrayBucket.put( arrFldName, fullFieldMap) idx = arrParts[1] fullField = fullFieldMap.get(idx) if (fullField is None): fullField = "" if (field.endswith("givenName")): fullField = "%s, %s" % (fullField, value) if (field.endswith("familyName")): fullField = "%s%s" % (value, fullField) self.log.debug("fullname now is :%s" % fullField) fullFieldMap.put(idx, fullField) else: fieldlist = self.arrayBucket.get(arrFldName) if fieldlist is None: fieldlist = [] self.arrayBucket.put(arrFldName, fieldlist) fieldlist.append(value) for compfield in self.compFields: if field.startswith(compfield): arrFldName = self.reportingFieldPrefix + compfield fullFieldMap = self.arrayBucket.get(arrFldName) if fullFieldMap is None: fullFieldMap = HashMap() self.arrayBucket.put(arrFldName, fullFieldMap) fullField = fullFieldMap.get("1") if fullField is None: fullField = "" if field.endswith( self.compFieldsConfig[compfield]["end"]): fullField = "%s%s%s" % ( fullField, self.compFieldsConfig[compfield]["delim"], value) if field.endswith( self.compFieldsConfig[compfield]["start"]): fullField = "%s%s" % (value, fullField) self.log.debug("full field now is :%s" % fullField) fullFieldMap.put("1", fullField) 
self.utils.add(self.index, "display_type", displayType) # Make sure we have a creation date if not createdDateFlag: self.utils.add(self.index, "date_created", self.last_modified) self.log.debug( "Forced creation date to %s because it was not explicitly set." % self.last_modified) # Workflow processing wfStep = wfMeta.getString(None, ["step"]) self.utils.add(self.index, "workflow_id", wfMeta.getString(None, ["id"])) self.utils.add(self.index, "workflow_step", wfStep) self.utils.add(self.index, "workflow_step_label", wfMeta.getString(None, ["label"])) for group in workflow_security: self.utils.add(self.index, "workflow_security", group) if self.owner is not None: self.utils.add(self.index, "workflow_security", self.owner) # set OAI-PMH status to deleted if wfStep == "retired": self.utils.add(self.index, "oai_deleted", "true")
from java.util import HashMap

# Count how many molecules in the request share each 'atom_count' value.
mols = request.body
freqs = HashMap()
molIter = mols.iterator()
while molIter.hasNext():
    mol = molIter.next()
    atoms = mol.getPropertyObject('atom_count')
    freq = freqs.get(atoms)
    if freq is not None:
        freqs.put(atoms, freq + 1)
    else:
        freqs.put(atoms, 1)
request.body = freqs
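# The same atom_count histogram sketched with a plain Python dict instead of
# java.util.HashMap, assuming mols is any iterable of molecule objects exposing
# getPropertyObject('atom_count'); the function name atomCountHistogram is hypothetical.
def atomCountHistogram(mols):
    freqs = {}
    for mol in mols:
        atoms = mol.getPropertyObject('atom_count')
        freqs[atoms] = freqs.get(atoms, 0) + 1
    return freqs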
def authenticate(self, configurationAttributes, requestParameters, step): context = Contexts.getEventContext() authenticationService = Component.getInstance(AuthenticationService) userService = Component.getInstance(UserService) mapUserDeployment = False enrollUserDeployment = False if (configurationAttributes.containsKey("gplus_deployment_type")): deploymentType = StringHelper.toLowerCase(configurationAttributes.get("gplus_deployment_type").getValue2()) if (StringHelper.equalsIgnoreCase(deploymentType, "map")): mapUserDeployment = True if (StringHelper.equalsIgnoreCase(deploymentType, "enroll")): enrollUserDeployment = True if (step == 1): print "Google+ Authenticate for step 1" gplusAuthCodeArray = requestParameters.get("gplus_auth_code") gplusAuthCode = gplusAuthCodeArray[0] # Check if user uses basic method to log in useBasicAuth = False if (StringHelper.isEmptyString(gplusAuthCode)): useBasicAuth = True # Use basic method to log in if (useBasicAuth): print "Google+ Authenticate for step 1. Basic authentication" context.set("gplus_count_login_steps", 1) credentials = Identity.instance().getCredentials() userName = credentials.getUsername() userPassword = credentials.getPassword() loggedIn = False if (StringHelper.isNotEmptyString(userName) and StringHelper.isNotEmptyString(userPassword)): userService = Component.getInstance(UserService) loggedIn = userService.authenticate(userName, userPassword) if (not loggedIn): return False return True # Use Google+ method to log in print "Google+ Authenticate for step 1. gplusAuthCode:", gplusAuthCode currentClientSecrets = self.getCurrentClientSecrets(self.clientSecrets, configurationAttributes, requestParameters) if (currentClientSecrets == None): print "Google+ Authenticate for step 1. Client secrets configuration is invalid" return False print "Google+ Authenticate for step 1. Attempting to gets tokens" tokenResponse = self.getTokensByCode(self.clientSecrets, configurationAttributes, gplusAuthCode); if ((tokenResponse == None) or (tokenResponse.getIdToken() == None) or (tokenResponse.getAccessToken() == None)): print "Google+ Authenticate for step 1. Failed to get tokens" return False else: print "Google+ Authenticate for step 1. Successfully gets tokens" jwt = Jwt.parse(tokenResponse.getIdToken()) # TODO: Validate ID Token Signature gplusUserUid = jwt.getClaims().getClaimAsString(JwtClaimName.SUBJECT_IDENTIFIER); print "Google+ Authenticate for step 1. Found Google user ID in the ID token: ", gplusUserUid if (mapUserDeployment): # Use mapping to local IDP user print "Google+ Authenticate for step 1. Attempting to find user by oxExternalUid: gplus:", gplusUserUid # Check if there is user with specified gplusUserUid foundUser = userService.getUserByAttribute("oxExternalUid", "gplus:" + gplusUserUid) if (foundUser == None): print "Google+ Authenticate for step 1. Failed to find user" print "Google+ Authenticate for step 1. Setting count steps to 2" context.set("gplus_count_login_steps", 2) context.set("gplus_user_uid", gplusUserUid) return True foundUserName = foundUser.getUserId() print "Google+ Authenticate for step 1. foundUserName:"******"Google+ Authenticate for step 1. Failed to authenticate user" return False print "Google+ Authenticate for step 1. Setting count steps to 1" context.set("gplus_count_login_steps", 1) postLoginResult = self.extensionPostLogin(configurationAttributes, foundUser) print "Google+ Authenticate for step 1. 
postLoginResult:", postLoginResult return postLoginResult elif (enrollUserDeployment): # Use auto enrollment to local IDP print "Google+ Authenticate for step 1. Attempting to find user by oxExternalUid: gplus:", gplusUserUid # Check if there is user with specified gplusUserUid foundUser = userService.getUserByAttribute("oxExternalUid", "gplus:" + gplusUserUid) if (foundUser == None): # Auto user enrollemnt print "Google+ Authenticate for step 1. There is no user in LDAP. Adding user to local LDAP" print "Google+ Authenticate for step 1. Attempting to gets user info" userInfoResponse = self.getUserInfo(currentClientSecrets, configurationAttributes, tokenResponse.getAccessToken()) if ((userInfoResponse == None) or (userInfoResponse.getClaims().size() == 0)): print "Google+ Authenticate for step 1. Failed to get user info" return False else: print "Google+ Authenticate for step 1. Successfully gets user info" gplusResponseAttributes = userInfoResponse.getClaims() # Convert Google+ user claims to lover case gplusResponseNormalizedAttributes = HashMap() for gplusResponseAttributeEntry in gplusResponseAttributes.entrySet(): gplusResponseNormalizedAttributes.put( StringHelper.toLowerCase(gplusResponseAttributeEntry.getKey()), gplusResponseAttributeEntry.getValue()) currentAttributesMapping = self.getCurrentAttributesMapping(self.attributesMapping, configurationAttributes, requestParameters) print "Google+ Authenticate for step 1. Using next attributes mapping", currentAttributesMapping newUser = User() for attributesMappingEntry in currentAttributesMapping.entrySet(): remoteAttribute = attributesMappingEntry.getKey() localAttribute = attributesMappingEntry.getValue() localAttributeValue = gplusResponseNormalizedAttributes.get(remoteAttribute) if (localAttribute != None): newUser.setAttribute(localAttribute, localAttributeValue) if (newUser.getAttribute("sn") == None): newUser.setAttribute("sn", gplusUserUid) if (newUser.getAttribute("cn") == None): newUser.setAttribute("cn", gplusUserUid) newUser.setAttribute("oxExternalUid", "gplus:" + gplusUserUid) print "Google+ Authenticate for step 1. Attempting to add user", gplusUserUid, " with next attributes", newUser.getCustomAttributes() foundUser = userService.addUser(newUser, True) print "Google+ Authenticate for step 1. Added new user with UID", foundUser.getUserId() foundUserName = foundUser.getUserId() print "Google+ Authenticate for step 1. foundUserName:"******"Google+ Authenticate for step 1. Failed to authenticate user" return False print "Google+ Authenticate for step 1. Setting count steps to 1" context.set("gplus_count_login_steps", 1) postLoginResult = self.extensionPostLogin(configurationAttributes, foundUser) print "Google+ Authenticate for step 1. postLoginResult:", postLoginResult return postLoginResult else: # Check if there is user with specified gplusUserUid print "Google+ Authenticate for step 1. Attempting to find user by uid:", gplusUserUid foundUser = userService.getUser(gplusUserUid) if (foundUser == None): print "Google+ Authenticate for step 1. Failed to find user" return False foundUserName = foundUser.getUserId() print "Google+ Authenticate for step 1. foundUserName:"******"Google+ Authenticate for step 1. Failed to authenticate user" return False print "Google+ Authenticate for step 1. Setting count steps to 1" context.set("gplus_count_login_steps", 1) postLoginResult = self.extensionPostLogin(configurationAttributes, foundUser) print "Google+ Authenticate for step 1. 
postLoginResult:", postLoginResult return postLoginResult elif (step == 2): print "Google+ Authenticate for step 2" sessionAttributes = context.get("sessionAttributes") if (sessionAttributes == None) or not sessionAttributes.containsKey("gplus_user_uid"): print "Google+ Authenticate for step 2. gplus_user_uid is empty" return False gplusUserUid = sessionAttributes.get("gplus_user_uid") passed_step1 = StringHelper.isNotEmptyString(gplusUserUid) if (not passed_step1): return False credentials = Identity.instance().getCredentials() userName = credentials.getUsername() userPassword = credentials.getPassword() loggedIn = False if (StringHelper.isNotEmptyString(userName) and StringHelper.isNotEmptyString(userPassword)): loggedIn = userService.authenticate(userName, userPassword) if (not loggedIn): return False # Check if there is user which has gplusUserUid # Avoid mapping Google account to more than one IDP account foundUser = userService.getUserByAttribute("oxExternalUid", "gplus:" + gplusUserUid) if (foundUser == None): # Add gplusUserUid to user one id UIDs foundUser = userService.addUserAttribute(userName, "oxExternalUid", "gplus:" + gplusUserUid) if (foundUser == None): print "Google+ Authenticate for step 2. Failed to update current user" return False postLoginResult = self.extensionPostLogin(configurationAttributes, foundUser) print "Google+ Authenticate for step 2. postLoginResult:", postLoginResult return postLoginResult else: foundUserName = foundUser.getUserId() print "Google+ Authenticate for step 2. foundUserName:"******"Google+ Authenticate for step 2. postLoginResult:", postLoginResult return postLoginResult return False else: return False
def addSentenceForm(cls, form, model, components, negations, trueComponent, falseComponent, usingBase, usingInput, recursionForms, temporaryComponents, temporaryNegations, functionInfoMap, constantChecker, completedSentenceFormValues): """ generated source for method addSentenceForm """ alwaysTrueSentences = model.getSentencesListedAsTrue(form) rules = model.getRules(form) for alwaysTrueSentence in alwaysTrueSentences: if alwaysTrueSentence.__name__ == cls.LEGAL or alwaysTrueSentence.__name__ == cls.NEXT or alwaysTrueSentence.__name__ == cls.GOAL: trueComponent.addOutput(prop) prop.addInput(trueComponent) components.put(alwaysTrueSentence, trueComponent) negations.put(alwaysTrueSentence, falseComponent) continue if usingInput and form.__name__ == cls.DOES: for inputSentence in constantChecker.getTrueSentences(inputForm): components.put(doesSentence, prop) return if usingBase and form.__name__ == cls.TRUE: for baseSentence in constantChecker.getTrueSentences(baseForm): components.put(trueSentence, prop) return inputsToOr = HashMap() for rule in rules: varsInLiveConjuncts.addAll(GdlUtils.getVariables(rule.getHead())) while asnItr.hasNext(): if assignment == None: continue ConcurrencyUtils.checkForInterruption() for literal in rule.getBody(): if isinstance(literal, (GdlSentence, )): if constantChecker.isConstantForm(conjunctForm): if not constantChecker.isTrueConstant(transformed): asnItr.changeOneInNext(varsToChange, assignment) componentsToConnect.add(None) continue if conj == None: conj = temporaryComponents.get(transformed) if conj == None and SentenceModelUtils.inSentenceFormGroup(transformed, recursionForms): temporaryComponents.put(transformed, tempProp) conj = tempProp if conj == None or isThisConstant(conj, falseComponent): asnItr.changeOneInNext(varsInConjunct, assignment) componentsToConnect.add(None) continue componentsToConnect.add(conj) elif isinstance(literal, (GdlNot, )): if constantChecker.isConstantForm(conjunctForm): if constantChecker.isTrueConstant(transformed): asnItr.changeOneInNext(varsToChange, assignment) componentsToConnect.add(None) continue if isThisConstant(conj, falseComponent): asnItr.changeOneInNext(varsInConjunct, assignment) componentsToConnect.add(None) continue if conj == None: conj = temporaryNegations.get(transformed) if conj == None and SentenceModelUtils.inSentenceFormGroup(transformed, recursionForms): if positive == None: positive = temporaryComponents.get(transformed) if positive == None: temporaryComponents.put(transformed, tempProp) positive = tempProp not_.addInput(positive) positive.addOutput(not_) temporaryNegations.put(transformed, not_) conj = not_ if conj == None: if positive == None: continue if existingNotOutput != None: componentsToConnect.add(existingNotOutput) negations.put(transformed, existingNotOutput) continue not_.addInput(positive) positive.addOutput(not_) negations.put(transformed, not_) conj = not_ componentsToConnect.add(conj) elif isinstance(literal, (GdlDistinct, )): else: raise RuntimeException("Unwanted GdlLiteral type") if not componentsToConnect.contains(None): andify(componentsToConnect, andComponent, trueComponent) if not isThisConstant(andComponent, falseComponent): if not inputsToOr.containsKey(sentence): inputsToOr.put(sentence, HashSet()) inputsToOr.get(sentence).add(andComponent) if preventDuplicatesFromConstants: asnItr.changeOneInNext(varsInLiveConjuncts, assignment) for entry in inputsToOr.entrySet(): ConcurrencyUtils.checkForInterruption() for input in inputs: if isinstance(input, (Constant, )) or 
input.getInputs().size() == 0: realInputs.add(input) else: realInputs.add(input.getSingleInput()) input.getSingleInput().removeOutput(input) input.removeAllInputs() cls.orify(realInputs, prop, falseComponent) components.put(sentence, prop) if form.__name__ == cls.TRUE or form.__name__ == cls.DOES: for sentence in model.getDomain(form): ConcurrencyUtils.checkForInterruption() components.put(sentence, prop)
class IndexData: def __activate__(self, context): # Prepare variables self.index = context["fields"] self.object = context["object"] self.payload = context["payload"] self.params = context["params"] self.utils = context["pyUtils"] self.config = context["jsonConfig"] self.log = context["log"] self.last_modified = time.strftime("%Y-%m-%dT%H:%M:%SZ", time.gmtime()) self.log.debug("Indexing Metadata Record '{}' '{}'", self.object.getId(), self.payload.getId()) # Common data self.__newDoc() self.packagePid = None pidList = self.object.getPayloadIdList() for pid in pidList: if pid.endswith(".tfpackage"): self.packagePid = pid # Real metadata if self.itemType == "object": self.__basicData() self.__metadata() # Some of the above steps may request some # messages be sent, particularly workflows self.__messages() # Make sure security comes after workflows self.__security() def __newDoc(self): self.oid = self.object.getId() self.pid = self.payload.getId() metadataPid = self.params.getProperty("metaPid", "DC") self.utils.add(self.index, "storage_id", self.oid) if self.pid == metadataPid: self.itemType = "object" else: self.oid += "/" + self.pid self.itemType = "datastream" self.utils.add(self.index, "identifier", self.pid) self.utils.add(self.index, "id", self.oid) self.utils.add(self.index, "item_type", self.itemType) self.utils.add(self.index, "last_modified", self.last_modified) self.utils.add(self.index, "harvest_config", self.params.getProperty("jsonConfigOid")) self.utils.add(self.index, "harvest_rules", self.params.getProperty("rulesOid")) self.item_security = [] self.owner = self.params.getProperty("owner", "guest") formatter = SimpleDateFormat('yyyyMMddHHmmss') self.params.setProperty("last_modified", formatter.format(Date())) self.utils.add(self.index, "date_object_created", self.params.getProperty("date_object_created")) self.params.setProperty( "date_object_modified", time.strftime("%Y-%m-%dT%H:%M:%SZ", time.localtime())) self.utils.add(self.index, "date_object_modified", self.params.getProperty("date_object_modified")) def __basicData(self): self.utils.add(self.index, "repository_name", self.params["repository.name"]) self.utils.add(self.index, "repository_type", self.params["repository.type"]) if self.params["date_transitioned"] is not None: self.utils.add(self.index, "date_transitioned", self.params["date_transitioned"]) # VITAL integration vitalPid = self.params["vitalPid"] if vitalPid is not None: self.utils.add(self.index, "vitalPid", vitalPid) # Persistent Identifiers pidProperty = self.config.getString(None, ["curation", "pidProperty"]) if pidProperty is None: self.log.error("No configuration found for persistent IDs!") else: pid = self.params[pidProperty] if pid is not None: self.utils.add(self.index, "known_ids", pid) self.utils.add(self.index, "pidProperty", pid) self.utils.add(self.index, "oai_identifier", pid) self.utils.add(self.index, "oai_set", "default") # Publication published = self.params["published"] if published is not None: self.utils.add(self.index, "published", "true") def __security(self): # Security roles = self.utils.getRolesWithAccess(self.oid) if roles is not None: # For every role currently with access for role in roles: # Should show up, but during debugging we got a few if role != "": if role in self.item_security: # They still have access self.utils.add(self.index, "security_filter", role) else: # Their access has been revoked self.__revokeRoleAccess(role) # Now for every role that the new step allows access for role in self.item_security: if role not in 
roles: # Grant access if new self.__grantRoleAccess(role) self.utils.add(self.index, "security_filter", role) # No existing security else: if self.item_security is None: # Guest access if none provided so far self.__grantRoleAccess("guest") self.utils.add(self.index, "security_filter", role) else: # Otherwise use workflow security for role in self.item_security: # Grant access if new self.__grantRoleAccess(role) self.utils.add(self.index, "security_filter", role) users = self.utils.getUsersWithAccess(self.oid) if users is not None: # For every role currently with access for user in users: self.utils.add(self.index, "security_exception", user) # Ownership if self.owner is None: self.utils.add(self.index, "owner", "system") else: self.utils.add(self.index, "owner", self.owner) def __indexList(self, name, values): # convert to set so no duplicate values for value in HashSet(values): self.utils.add(self.index, name, value) def __grantRoleAccess(self, newRole): schema = self.utils.getAccessSchema() schema.setRecordId(self.oid) schema.set("role", newRole) self.utils.setAccessSchema(schema) def __grantUserAccess(self, newUser): schema = self.utils.getAccessSchema() schema.setRecordId(self.oid) schema.set("user", newUser) self.utils.setAccessSchema(schema) def __revokeRoleAccess(self, oldRole): schema = self.utils.getAccessSchema() schema.setRecordId(self.oid) schema.set("role", oldRole) self.utils.removeAccessSchema(schema) def __revokeUserAccess(self, oldUser): schema = self.utils.getAccessSchema() schema.setRecordId(self.oid) schema.set("user", oldUser) self.utils.removeAccessSchema(schema) def __metadata(self): self.title = None self.dcType = None self.descriptionList = [] self.creatorList = [] self.creationDate = [] self.contributorList = [] self.approverList = [] self.formatList = ["application/x-fascinator-package"] self.fulltext = [] self.relationDict = {} self.customFields = {} self.creatorFullNameMap = HashMap() self.grantNumberList = [] self.arrayBucket = HashMap() self.compFields = [ "dc:coverage.vivo:DateTimeInterval", "locrel:prc.foaf:Person" ] self.compFieldsConfig = { "dc:coverage.vivo:DateTimeInterval": { "delim": " to ", "start": "start", "end": "end" }, "locrel:prc.foaf:Person": { "delim": ", ", "start": "familyName", "end": "givenName" } } self.reportingFieldPrefix = "reporting_" self.embargoedDate = None self.createTimeStamp = None # Try our data sources, order matters self.__workflow() # Some defaults if the above failed if self.title is None: self.title = "New Dataset" if self.formatList == []: source = self.object.getPayload(self.packagePid) self.formatList.append(source.getContentType()) # Index our metadata finally self.utils.add(self.index, "dc_title", self.title) if self.dcType is not None: self.utils.add(self.index, "dc_type", self.dcType) self.__indexList( "dc_creator", self.creatorList) #no dc_author in schema.xml, need to check self.__indexList("dc_contributor", self.contributorList) self.__indexList("dc_description", self.descriptionList) self.__indexList("dc_format", self.formatList) self.__indexList("dc_date", self.creationDate) self.__indexList("full_text", self.fulltext) for key in self.customFields: self.__indexList(key, self.customFields[key]) for key in self.relationDict: self.__indexList(key, self.relationDict[key]) if self.arrayBucket.size() > 0: for arrFldName in self.arrayBucket.keySet(): if arrFldName.endswith("Person") or arrFldName.replace( self.reportingFieldPrefix, "") in self.compFields: self.__indexList(arrFldName, 
self.arrayBucket.get(arrFldName).values()) else: self.__indexList(arrFldName, self.arrayBucket.get(arrFldName)) if self.embargoedDate is not None: self.utils.add(self.index, "date_embargoed", self.embargoedDate + "T00:00:00Z") if self.createTimeStamp is None: self.utils.add( self.index, "create_timestamp", time.strftime("%Y-%m-%dT%H:%M:%SZ", time.localtime())) def __workflow(self): # Workflow data WORKFLOW_ID = "dataset" wfChanged = False workflow_security = [] self.message_list = None stages = self.config.getJsonSimpleList(["stages"]) if self.owner == "guest": pageTitle = "Submission Request" displayType = "submission-request" initialStep = 0 else: pageTitle = "Metadata Record" displayType = "package-dataset" initialStep = 1 try: wfMeta = self.__getJsonPayload("workflow.metadata") wfMeta.getJsonObject().put("pageTitle", pageTitle) # Are we indexing because of a workflow progression? targetStep = wfMeta.getString(None, ["targetStep"]) if targetStep is not None and targetStep != wfMeta.getString( None, ["step"]): wfChanged = True # Step change wfMeta.getJsonObject().put("step", targetStep) wfMeta.getJsonObject().remove("targetStep") # This must be a re-index then else: targetStep = wfMeta.getString(None, ["step"]) # Security change for stage in stages: if stage.getString(None, ["name"]) == targetStep: wfMeta.getJsonObject().put( "label", stage.getString(None, ["label"])) self.item_security = stage.getStringList(["visibility"]) workflow_security = stage.getStringList(["security"]) if wfChanged == True: self.message_list = stage.getStringList(["message"]) except StorageException: # No workflow payload, time to create initialStage = stages.get(initialStep).getString(None, ["name"]) wfChanged = True wfMeta = JsonSimple() wfMetaObj = wfMeta.getJsonObject() wfMetaObj.put("id", WORKFLOW_ID) wfMetaObj.put("step", initialStage) wfMetaObj.put("pageTitle", pageTitle) stages = self.config.getJsonSimpleList(["stages"]) for stage in stages: if stage.getString(None, ["name"]) == initialStage: wfMetaObj.put("label", stage.getString(None, ["label"])) self.item_security = stage.getStringList(["visibility"]) workflow_security = stage.getStringList(["security"]) self.message_list = stage.getStringList(["message"]) # Has the workflow metadata changed? 
if wfChanged == True: inStream = IOUtils.toInputStream(wfMeta.toString(True), "UTF-8") try: StorageUtils.createOrUpdatePayload(self.object, "workflow.metadata", inStream) except StorageException: print " ERROR updating dataset payload" # Form processing coreFields = [ "title", "description", "manifest", "metaList", "relationships", "responses" ] formData = wfMeta.getObject(["formData"]) if formData is not None: formData = JsonSimple(formData) # Core fields description = formData.getStringList(["description"]) if description: self.descriptionList = description # Non-core fields data = formData.getJsonObject() for field in data.keySet(): if field not in coreFields: self.customFields[field] = formData.getStringList([field]) # Manifest processing (formData not present in wfMeta) manifest = self.__getJsonPayload(self.packagePid) formTitles = manifest.getStringList(["title"]) if formTitles: for formTitle in formTitles: if self.title is None: self.title = formTitle self.descriptionList = [manifest.getString("", ["description"])] #Used to make sure we have a created date createdDateFlag = False formData = manifest.getJsonObject() for field in formData.keySet(): if field not in coreFields: value = formData.get(field) if value is not None and value.strip() != "": self.utils.add(self.index, field, value) # We want to sort by date of creation, so it # needs to be indexed as a date (ie. 'date_*') if field == "dc:created": parsedTime = time.strptime(value, "%Y-%m-%d") solrTime = time.strftime("%Y-%m-%dT%H:%M:%SZ", parsedTime) self.utils.add(self.index, "date_created", solrTime) self.log.debug("Set created date to :%s" % solrTime) createdDateFlag = True elif field == "redbox:embargo.dc:date": self.embargoedDate = value elif field == "create_timestamp": self.createTimeStamp = value # try to extract some common fields for faceting if field.startswith("dc:") and \ not (field.endswith(".dc:identifier.rdf:PlainLiteral") \ or field.endswith(".dc:identifier") \ or field.endswith(".rdf:resource")): # index dublin core fields for faceting basicField = field.replace("dc:", "dc_") dot = field.find(".") if dot > 0: facetField = basicField[:dot] else: facetField = basicField #print "Indexing DC field '%s':'%s'" % (field, facetField) if facetField == "dc_title": if self.title is None: self.title = value elif facetField == "dc_type": if self.dcType is None: self.dcType = value elif facetField == "dc_creator": if basicField.endswith("foaf_name"): self.utils.add(self.index, "dc_creator", value) else: self.utils.add(self.index, facetField, value) # index keywords for lookup if field.startswith("dc:subject.vivo:keyword."): self.utils.add(self.index, "keywords", value) # check if this is an array field fnameparts = field.split(":") if fnameparts is not None and len(fnameparts) >= 3: if field.startswith("bibo") or field.startswith( "skos"): arrParts = fnameparts[1].split(".") else: arrParts = fnameparts[2].split(".") # we're not interested in: Relationship, Type and some redbox:origin if arrParts is not None and len( arrParts) >= 2 and field.find( ":Relationship.") == -1 and field.find( "dc:type") == -1 and field.find( "redbox:origin" ) == -1 and arrParts[1].isdigit(): # we've got an array field fldPart = ":%s" % arrParts[0] prefixEndIdx = field.find(fldPart) + len(fldPart) suffixStartIdx = prefixEndIdx + len( arrParts[1]) + 1 arrFldName = self.reportingFieldPrefix + field[:prefixEndIdx] + field[ suffixStartIdx:] if field.endswith("Name"): arrFldName = self.reportingFieldPrefix + field[: prefixEndIdx] self.log.debug( "Array 
Field name is:%s from: %s, with value:%s" % (arrFldName, field, value)) if field.endswith("Name"): fullFieldMap = self.arrayBucket.get(arrFldName) if fullFieldMap is None: fullFieldMap = HashMap() self.arrayBucket.put( arrFldName, fullFieldMap) idx = arrParts[1] fullField = fullFieldMap.get(idx) if (fullField is None): fullField = "" if (field.endswith("givenName")): fullField = "%s, %s" % (fullField, value) if (field.endswith("familyName")): fullField = "%s%s" % (value, fullField) self.log.debug("fullname now is :%s" % fullField) fullFieldMap.put(idx, fullField) else: fieldlist = self.arrayBucket.get(arrFldName) if fieldlist is None: fieldlist = [] self.arrayBucket.put(arrFldName, fieldlist) fieldlist.append(value) for compfield in self.compFields: if field.startswith(compfield): arrFldName = self.reportingFieldPrefix + compfield fullFieldMap = self.arrayBucket.get(arrFldName) if fullFieldMap is None: fullFieldMap = HashMap() self.arrayBucket.put(arrFldName, fullFieldMap) fullField = fullFieldMap.get("1") if fullField is None: fullField = "" if field.endswith( self.compFieldsConfig[compfield]["end"]): fullField = "%s%s%s" % ( fullField, self.compFieldsConfig[compfield]["delim"], value) if field.endswith( self.compFieldsConfig[compfield]["start"]): fullField = "%s%s" % (value, fullField) self.log.debug("full field now is :%s" % fullField) fullFieldMap.put("1", fullField) self.utils.add(self.index, "display_type", displayType) # Make sure we have a creation date if not createdDateFlag: self.utils.add(self.index, "date_created", self.last_modified) self.log.debug( "Forced creation date to %s because it was not explicitly set." % self.last_modified) # Workflow processing wfStep = wfMeta.getString(None, ["step"]) self.utils.add(self.index, "workflow_id", wfMeta.getString(None, ["id"])) self.utils.add(self.index, "workflow_step", wfStep) self.utils.add(self.index, "workflow_step_label", wfMeta.getString(None, ["label"])) for group in workflow_security: self.utils.add(self.index, "workflow_security", group) if self.owner is not None: self.utils.add(self.index, "workflow_security", self.owner) # set OAI-PMH status to deleted if wfStep == "retired": self.utils.add(self.index, "oai_deleted", "true") def __messages(self): if self.message_list is not None and len(self.message_list) > 0: msg = JsonSimple() msg.getJsonObject().put("oid", self.oid) message = msg.toString() for target in self.message_list: self.utils.sendMessage(target, message) def __getJsonPayload(self, pid): payload = self.object.getPayload(pid) json = self.utils.getJsonObject(payload.open()) payload.close() return json
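The trickiest part of __metadata above is the assembly of person names that arrive as separately numbered manifest entries (family name and given name each under their own key) into single "familyName, givenName" strings keyed by the numeric index. Below is a minimal plain-Python sketch of that assembly; the field names and the dict (standing in for the Jython HashMap bucket) are illustrative, not the actual ReDBox manifest keys.

# Sketch of the name assembly in __metadata: numbered entries ending in
# familyName/givenName are merged into one "familyName, givenName" string
# per index, regardless of which part arrives first.
def assemble_names(entries):
    # entries: list of (field, value) pairs, e.g.
    # [("creator.1.familyName", "Smith"), ("creator.1.givenName", "Ann")]
    names = {}
    for field, value in entries:
        idx = field.split(".")[-2]          # the numeric index segment
        current = names.get(idx, "")
        if field.endswith("givenName"):
            current = "%s, %s" % (current, value)
        elif field.endswith("familyName"):
            current = "%s%s" % (value, current)
        names[idx] = current
    return names

# assemble_names([("creator.1.familyName", "Smith"),
#                 ("creator.1.givenName", "Ann")]) -> {"1": "Smith, Ann"}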
class TimerTab(ITab, IHttpListener): def __init__(self, callbacks, helpers): self._callbacks = callbacks self._helpers = helpers self.isRunning = True self.toolFilter = 0 self.reqResMap = HashMap() callbacks.registerHttpListener(self) self.panel = TimerPanel( logtable_factory=lambda model: LogTable(model, self._callbacks), external_clear_button_action_listener=lambda e: self.getReqResMap( ).clear(), external_start_button_action_listener=lambda e: self.setRunning( True), external_stop_button_action_listener=lambda e: self.setRunning( False), external_filter_action_listener=self.filter_action_listener, tools_keys=["All", "Proxy", "Intruder", "Scanner", "Repeater"]) def getTabCaption(self): """ Override ITab method :return: tab name """ return "InQL Timer" def getUiComponent(self): """ Override ITab method :return: Tab UI Component """ self._callbacks.customizeUiComponent(self.panel.this) return self.panel.this def filter_action_listener(self, e): tool = e.getSource().getSelectedItem() if tool == "All": self.setToolFilter(0) elif tool == "Proxy": self.setToolFilter(IBurpExtenderCallbacks.TOOL_PROXY) elif tool == "Intruder": self.setToolFilter(IBurpExtenderCallbacks.TOOL_INTRUDER) elif tool == "Scanner": self.setToolFilter(IBurpExtenderCallbacks.TOOL_SCANNER) elif tool == "Repeater": self.setToolFilter(IBurpExtenderCallbacks.TOOL_REPEATER) else: raise RuntimeError("Unknown tool: %s" % tool) def setRunning(self, running): self.isRunning = running def setToolFilter(self, toolFilter): self.toolFilter = toolFilter def processHttpMessage(self, toolFlag, messageIsRequest, requestResponse): if self.isRunning: if self.toolFilter == 0 or self.toolFilter == toolFlag: messageInfo = self._helpers.analyzeRequest(requestResponse) url = messageInfo.getUrl() requestBody = requestResponse.getRequest( )[messageInfo.getBodyOffset():].tostring() if not is_query(requestBody): return # exit early qobj = json.loads(requestBody) queryBody = "" operationName = "" if 'query' in qobj: queryBody = qobj['query'] if 'operationName' in qobj: operationName = qobj['operationName'] if messageIsRequest: self.reqResMap.put(url, System.currentTimeMillis()) elif self.reqResMap.containsKey(url): time = System.currentTimeMillis() - self.reqResMap.get(url) self.reqResMap.remove(url) # create a new log entry with the message details synchronize.apply_synchronized( self.panel.getLogTableModel().getLogArray(), self.syncProcessHttpMessage, (toolFlag, requestResponse, time, queryBody, operationName)) def syncProcessHttpMessage(self, toolFlag, messageInfo, time, queryBody, operationName): row = self.panel.getLogTableModel().getLogArray().size() # Log all requests - the default if not self.panel.getQueryFilterText( ) and not self.panel.isScopeSelected(): self.addLog(messageInfo, toolFlag, time, row, operationName) # Log filter URL requests elif not self.panel.isScopeSelected() and self.panel.getQueryFilterText() and \ self.panel.getQueryFilterText() in queryBody: self.addLog(messageInfo, toolFlag, time, row, operationName) # Log in-scope requests elif self.panel.isScopeSelected() and not self.panel.getQueryFilterText() and \ self._callbacks.isInScope(self._helpers.analyzeRequest(messageInfo).getUrl()): self.addLog(messageInfo, toolFlag, time, row, operationName) # Log in-scope requests and filter elif self.panel.isScopeSelected() and self.panel.getQueryFilterText() and \ self._callbacks.isInScope(self._helpers.analyzeRequest(messageInfo).getUrl()) and \ self.panel.getQueryFilterText() in queryBody: self.addLog(messageInfo, toolFlag, time, 
row, operationName) def addLog(self, messageInfo, toolFlag, time, row, operationName): self.panel.getLogTableModel().getLogArray().add( Log( LocalDateTime.now(), self._callbacks.getToolName(toolFlag), self._callbacks.saveBuffersToTempFiles(messageInfo), self._helpers.analyzeRequest(messageInfo).getUrl(), self._helpers.analyzeResponse( messageInfo.getResponse()).getStatusCode(), operationName, time)) self.panel.getLogTableModel().fireTableRowsInserted(row, row) def getReqResMap(self): return self.reqResMap
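TimerTab measures GraphQL round-trip time by storing System.currentTimeMillis() keyed by URL when the request goes out and computing the difference when the matching response comes back. A tiny plain-Python sketch of the same scheme, using a dict instead of the Java HashMap and time.time() instead of System.currentTimeMillis():

import time

# Sketch of TimerTab's timing scheme: record the start time per URL on the
# outgoing request, pop it and compute the elapsed time on the response.
class RequestTimer(object):
    def __init__(self):
        self._started = {}

    def on_request(self, url):
        self._started[url] = time.time()

    def on_response(self, url):
        start = self._started.pop(url, None)
        if start is None:
            return None                          # no matching request seen
        return (time.time() - start) * 1000.0    # elapsed milliseconds

Like the original, this keeps a single pending timestamp per URL, so overlapping requests to the same URL overwrite each other; that is a deliberate simplification of the listener above.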
def authenticate(self, configurationAttributes, requestParameters, step): context = Contexts.getEventContext() authenticationService = AuthenticationService.instance() userService = UserService.instance() encryptionService = EncryptionService.instance() mapUserDeployment = False enrollUserDeployment = False if (configurationAttributes.containsKey("gplus_deployment_type")): deploymentType = StringHelper.toLowerCase(configurationAttributes.get("gplus_deployment_type").getValue2()) if (StringHelper.equalsIgnoreCase(deploymentType, "map")): mapUserDeployment = True if (StringHelper.equalsIgnoreCase(deploymentType, "enroll")): enrollUserDeployment = True if (step == 1): print "Google+ authenticate for step 1" gplusAuthCodeArray = requestParameters.get("gplus_auth_code") gplusAuthCode = gplusAuthCodeArray[0] # Check if user uses basic method to log in useBasicAuth = False if (StringHelper.isEmptyString(gplusAuthCode)): useBasicAuth = True # Use basic method to log in if (useBasicAuth): print "Google+ authenticate for step 1. Basic authentication" context.set("gplus_count_login_steps", 1) credentials = Identity.instance().getCredentials() userName = credentials.getUsername() userPassword = credentials.getPassword() loggedIn = False if (StringHelper.isNotEmptyString(userName) and StringHelper.isNotEmptyString(userPassword)): userService = UserService.instance() loggedIn = userService.authenticate(userName, userPassword) if (not loggedIn): return False return True # Use Google+ method to log in print "Google+ authenticate for step 1. gplusAuthCode:", gplusAuthCode currentClientSecrets = self.getCurrentClientSecrets(self.clientSecrets, configurationAttributes, requestParameters) if (currentClientSecrets == None): print "Google+ authenticate for step 1. Client secrets configuration is invalid" return False print "Google+ authenticate for step 1. Attempting to gets tokens" tokenResponse = self.getTokensByCode(self.clientSecrets, configurationAttributes, gplusAuthCode); if ((tokenResponse == None) or (tokenResponse.getIdToken() == None) or (tokenResponse.getAccessToken() == None)): print "Google+ authenticate for step 1. Failed to get tokens" return False else: print "Google+ authenticate for step 1. Successfully gets tokens" jwt = Jwt.parse(tokenResponse.getIdToken()) # TODO: Validate ID Token Signature gplusUserUid = jwt.getClaims().getClaimAsString(JwtClaimName.SUBJECT_IDENTIFIER); print "Google+ authenticate for step 1. Found Google user ID in the ID token: ", gplusUserUid if (mapUserDeployment): # Use mapping to local IDP user print "Google+ authenticate for step 1. Attempting to find user by oxExternalUid: gplus:", gplusUserUid # Check if there is user with specified gplusUserUid foundUser = userService.getUserByAttribute("oxExternalUid", "gplus:" + gplusUserUid) if (foundUser == None): print "Google+ authenticate for step 1. Failed to find user" print "Google+ authenticate for step 1. Setting count steps to 2" context.set("gplus_count_login_steps", 2) context.set("gplus_user_uid", encryptionService.encrypt(gplusUserUid)) return True foundUserName = foundUser.getUserId() print "Google+ authenticate for step 1. foundUserName:"******"Google+ authenticate for step 1. Failed to authenticate user" return False print "Google+ authenticate for step 1. Setting count steps to 1" context.set("gplus_count_login_steps", 1) postLoginResult = self.extensionPostLogin(configurationAttributes, foundUser) print "Google+ authenticate for step 1. 
postLoginResult:", postLoginResult return postLoginResult elif (enrollUserDeployment): # Use auto enrollment to local IDP print "Google+ authenticate for step 1. Attempting to find user by oxExternalUid: gplus:", gplusUserUid # Check if there is user with specified gplusUserUid foundUser = userService.getUserByAttribute("oxExternalUid", "gplus:" + gplusUserUid) if (foundUser == None): # Auto user enrollemnt print "Google+ authenticate for step 1. There is no user in LDAP. Adding user to local LDAP" print "Google+ authenticate for step 1. Attempting to gets user info" userInfoResponse = self.getUserInfo(currentClientSecrets, configurationAttributes, tokenResponse.getAccessToken()) if ((userInfoResponse == None) or (userInfoResponse.getClaims().size() == 0)): print "Google+ authenticate for step 1. Failed to get user info" return False else: print "Google+ authenticate for step 1. Successfully gets user info" gplusResponseAttributes = userInfoResponse.getClaims() # Convert Google+ user claims to lover case gplusResponseNormalizedAttributes = HashMap() for gplusResponseAttributeEntry in gplusResponseAttributes.entrySet(): gplusResponseNormalizedAttributes.put( StringHelper.toLowerCase(gplusResponseAttributeEntry.getKey()), gplusResponseAttributeEntry.getValue()) currentAttributesMapping = self.getCurrentAttributesMapping(self.attributesMapping, configurationAttributes, requestParameters) print "Google+ authenticate for step 1. Using next attributes mapping", currentAttributesMapping newUser = User() for attributesMappingEntry in currentAttributesMapping.entrySet(): idpAttribute = attributesMappingEntry.getKey() localAttribute = attributesMappingEntry.getValue() localAttributeValue = gplusResponseNormalizedAttributes.get(idpAttribute) if (localAttribute != None): newUser.setAttribute(localAttribute, localAttributeValue) if (newUser.getAttribute("sn") == None): newUser.setAttribute("sn", gplusUserUid) if (newUser.getAttribute("cn") == None): newUser.setAttribute("cn", gplusUserUid) newUser.setAttribute("oxExternalUid", "gplus:" + gplusUserUid) print "Google+ authenticate for step 1. Attempting to add user", gplusUserUid, " with next attributes", newUser.getCustomAttributes() foundUser = userService.addUser(newUser) print "Google+ authenticate for step 1. Added new user with UID", foundUser.getUserId() foundUserName = foundUser.getUserId() print "Google+ authenticate for step 1. foundUserName:"******"Google+ authenticate for step 1. Failed to authenticate user" return False print "Google+ authenticate for step 1. Setting count steps to 1" context.set("gplus_count_login_steps", 1) postLoginResult = self.extensionPostLogin(configurationAttributes, foundUser) print "Google+ authenticate for step 1. postLoginResult:", postLoginResult return postLoginResult else: # Check if the is user with specified gplusUserUid print "Google+ authenticate for step 1. Attempting to find user by uid:", gplusUserUid foundUser = userService.getUser(gplusUserUid) if (foundUser == None): print "Google+ authenticate for step 1. Failed to find user" return False foundUserName = foundUser.getUserId() print "Google+ authenticate for step 1. foundUserName:"******"Google+ authenticate for step 1. Failed to authenticate user" return False print "Google+ authenticate for step 1. Setting count steps to 1" context.set("gplus_count_login_steps", 1) postLoginResult = self.extensionPostLogin(configurationAttributes, foundUser) print "Google+ authenticate for step 1. 
postLoginResult:", postLoginResult return postLoginResult elif (step == 2): print "Google+ authenticate for step 2" gplusUserUidArray = requestParameters.get("gplus_user_uid") if ArrayHelper.isEmpty(gplusUserUidArray): print "Google+ authenticate for step 2. gplus_user_uid is empty" return False gplusUserUid = encryptionService.decrypt(gplusUserUidArray[0]) passedStep1 = StringHelper.isNotEmptyString(gplusUserUid) if (not passedStep1): return False credentials = Identity.instance().getCredentials() userName = credentials.getUsername() userPassword = credentials.getPassword() loggedIn = False if (StringHelper.isNotEmptyString(userName) and StringHelper.isNotEmptyString(userPassword)): loggedIn = userService.authenticate(userName, userPassword) if (not loggedIn): return False # Check if there is already a user with this gplusUserUid # Avoid mapping a Google account to more than one IDP account foundUser = userService.getUserByAttribute("oxExternalUid", "gplus:" + gplusUserUid) if (foundUser == None): # Add gplusUserUid to the current user's external UIDs (oxExternalUid) foundUser = userService.addUserAttribute(userName, "oxExternalUid", "gplus:" + gplusUserUid) if (foundUser == None): print "Google+ authenticate for step 2. Failed to update current user" return False postLoginResult = self.extensionPostLogin(configurationAttributes, foundUser) print "Google+ authenticate for step 2. postLoginResult:", postLoginResult return postLoginResult else: foundUserName = foundUser.getUserId() print "Google+ authenticate for step 2. foundUserName:"******"Google+ authenticate for step 2. postLoginResult:", postLoginResult return postLoginResult return False else: return False
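In the "map" deployment the Google+ script decides between a one-step and a two-step login by looking the Google subject up as an oxExternalUid of the form gplus:<subject>. A plain-Python sketch of that decision follows; the users dict stands in for the LDAP lookup and is purely illustrative.

# Sketch of the step-1 "map" decision: if no local account is linked to the
# Google subject yet, schedule step 2 (basic login + linking); otherwise a
# single step is enough.
def decide_login_steps(gplus_user_uid, users, context):
    external_uid = "gplus:" + gplus_user_uid
    if external_uid not in users:
        context["gplus_count_login_steps"] = 2
        context["gplus_user_uid"] = gplus_user_uid
    else:
        context["gplus_count_login_steps"] = 1
    return True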
def authenticate(self, configurationAttributes, requestParameters, step): context = Contexts.getEventContext() authenticationService = AuthenticationService.instance() userService = UserService.instance() saml_map_user = False saml_enroll_user = False saml_enroll_all_user_attr = False # Use saml_deployment_type only if there is no attributes mapping if (configurationAttributes.containsKey("saml_deployment_type")): saml_deployment_type = StringHelper.toLowerCase(configurationAttributes.get("saml_deployment_type").getValue2()) if (StringHelper.equalsIgnoreCase(saml_deployment_type, "map")): saml_map_user = True if (StringHelper.equalsIgnoreCase(saml_deployment_type, "enroll")): saml_enroll_user = True if (StringHelper.equalsIgnoreCase(saml_deployment_type, "enroll_all_attr")): saml_enroll_all_user_attr = True saml_allow_basic_login = False if (configurationAttributes.containsKey("saml_allow_basic_login")): saml_allow_basic_login = StringHelper.toBoolean(configurationAttributes.get("saml_allow_basic_login").getValue2(), False) use_basic_auth = False if (saml_allow_basic_login): # Detect if user used basic authnetication method credentials = Identity.instance().getCredentials() user_name = credentials.getUsername() user_password = credentials.getPassword() if (StringHelper.isNotEmpty(user_name) and StringHelper.isNotEmpty(user_password)): use_basic_auth = True if ((step == 1) and saml_allow_basic_login and use_basic_auth): print "Saml. Authenticate for step 1. Basic authentication" context.set("saml_count_login_steps", 1) credentials = Identity.instance().getCredentials() user_name = credentials.getUsername() user_password = credentials.getPassword() logged_in = False if (StringHelper.isNotEmptyString(user_name) and StringHelper.isNotEmptyString(user_password)): userService = UserService.instance() logged_in = userService.authenticate(user_name, user_password) if (not logged_in): return False return True if (step == 1): print "Saml. Authenticate for step 1" currentSamlConfiguration = self.getCurrentSamlConfiguration(self.samlConfiguration, configurationAttributes, requestParameters) if (currentSamlConfiguration == None): print "Saml. Prepare for step 1. Client saml configuration is invalid" return False saml_response_array = requestParameters.get("SAMLResponse") if ArrayHelper.isEmpty(saml_response_array): print "Saml. Authenticate for step 1. saml_response is empty" return False saml_response = saml_response_array[0] print "Saml. Authenticate for step 1. saml_response:", saml_response samlResponse = Response(currentSamlConfiguration) samlResponse.loadXmlFromBase64(saml_response) saml_validate_response = True if (configurationAttributes.containsKey("saml_validate_response")): saml_validate_response = StringHelper.toBoolean(configurationAttributes.get("saml_validate_response").getValue2(), False) if (saml_validate_response): if (not samlResponse.isValid()): print "Saml. Authenticate for step 1. saml_response isn't valid" saml_response_name_id = samlResponse.getNameId() if (StringHelper.isEmpty(saml_response_name_id)): print "Saml. Authenticate for step 1. saml_response_name_id is invalid" return False print "Saml. Authenticate for step 1. saml_response_name_id:", saml_response_name_id saml_response_attributes = samlResponse.getAttributes() print "Saml. Authenticate for step 1. attributes: ", saml_response_attributes # Use persistent Id as saml_user_uid saml_user_uid = saml_response_name_id if (saml_map_user): # Use mapping to local IDP user print "Saml. Authenticate for step 1. 
Attempting to find user by oxExternalUid: saml:", saml_user_uid # Check if the is user with specified saml_user_uid find_user_by_uid = userService.getUserByAttribute("oxExternalUid", "saml:" + saml_user_uid) if (find_user_by_uid == None): print "Saml. Authenticate for step 1. Failed to find user" print "Saml. Authenticate for step 1. Setting count steps to 2" context.set("saml_count_login_steps", 2) context.set("saml_user_uid", saml_user_uid) return True found_user_name = find_user_by_uid.getUserId() print "Saml. Authenticate for step 1. found_user_name:", found_user_name user_authenticated = authenticationService.authenticate(found_user_name) if (user_authenticated == False): print "Saml. Authenticate for step 1. Failed to authenticate user" return False print "Saml. Authenticate for step 1. Setting count steps to 1" context.set("saml_count_login_steps", 1) post_login_result = self.samlExtensionPostLogin(configurationAttributes, find_user_by_uid) print "Saml. Authenticate for step 1. post_login_result:", post_login_result return post_login_result elif (saml_enroll_user): # Use auto enrollment to local IDP print "Saml. Authenticate for step 1. Attempting to find user by oxExternalUid: saml:", saml_user_uid # Check if the is user with specified saml_user_uid find_user_by_uid = userService.getUserByAttribute("oxExternalUid", "saml:" + saml_user_uid) if (find_user_by_uid == None): # Auto user enrollemnt print "Saml. Authenticate for step 1. There is no user in LDAP. Adding user to local LDAP" # Convert saml result attributes keys to lover case saml_response_normalized_attributes = HashMap() for saml_response_attribute_entry in saml_response_attributes.entrySet(): saml_response_normalized_attributes.put( StringHelper.toLowerCase(saml_response_attribute_entry.getKey()), saml_response_attribute_entry.getValue()) currentAttributesMapping = self.prepareCurrentAttributesMapping(self.attributesMapping, configurationAttributes, requestParameters) print "Saml. Authenticate for step 1. Using next attributes mapping", currentAttributesMapping newUser = User() for attributesMappingEntry in currentAttributesMapping.entrySet(): idpAttribute = attributesMappingEntry.getKey() localAttribute = attributesMappingEntry.getValue() localAttributeValue = saml_response_normalized_attributes.get(idpAttribute) if (localAttribute != None): newUser.setAttribute(localAttribute, localAttributeValue) newUser.setAttribute("oxExternalUid", "saml:" + saml_user_uid) print "Saml. Authenticate for step 1. Attempting to add user", saml_user_uid, " with next attributes", newUser.getCustomAttributes() find_user_by_uid = userService.addUser(newUser, True) print "Saml. Authenticate for step 1. Added new user with UID", find_user_by_uid.getUserId() found_user_name = find_user_by_uid.getUserId() print "Saml. Authenticate for step 1. found_user_name:", found_user_name user_authenticated = authenticationService.authenticate(found_user_name) if (user_authenticated == False): print "Saml. Authenticate for step 1. Failed to authenticate user" return False print "Saml. Authenticate for step 1. Setting count steps to 1" context.set("saml_count_login_steps", 1) post_login_result = self.samlExtensionPostLogin(configurationAttributes, find_user_by_uid) print "Saml. Authenticate for step 1. post_login_result:", post_login_result return post_login_result elif (saml_enroll_all_user_attr): print "Saml. Authenticate for step 1. 
Attempting to find user by oxExternalUid: saml:" + saml_user_uid # Check if the is user with specified saml_user_uid find_user_by_uid = userService.getUserByAttribute("oxExternalUid", "saml:" + saml_user_uid) if (find_user_by_uid == None): print "Saml. Authenticate for step 1. Failed to find user" user = User() customAttributes = ArrayList() for key in attributes.keySet(): ldapAttributes = attributeService.getAllAttributes() for ldapAttribute in ldapAttributes: saml2Uri = ldapAttribute.getSaml2Uri() if(saml2Uri == None): saml2Uri = attributeService.getDefaultSaml2Uri(ldapAttribute.getName()) if(saml2Uri == key): attribute = CustomAttribute(ldapAttribute.getName()) attribute.setValues(attributes.get(key)) customAttributes.add(attribute) attribute = CustomAttribute("oxExternalUid") attribute.setValue("saml:" + saml_user_uid) customAttributes.add(attribute) user.setCustomAttributes(customAttributes) if(user.getAttribute("sn") == None): attribute = CustomAttribute("sn") attribute.setValue(saml_user_uid) customAttributes.add(attribute) if(user.getAttribute("cn") == None): attribute = CustomAttribute("cn") attribute.setValue(saml_user_uid) customAttributes.add(attribute) find_user_by_uid = userService.addUser(user, True) print "Saml. Authenticate for step 1. Added new user with UID", find_user_by_uid.getUserId() found_user_name = find_user_by_uid.getUserId() print "Saml. Authenticate for step 1. found_user_name:", found_user_name user_authenticated = authenticationService.authenticate(found_user_name) if (user_authenticated == False): print "Saml. Authenticate for step 1. Failed to authenticate user" return False print "Saml. Authenticate for step 1. Setting count steps to 1" context.set("saml_count_login_steps", 1) post_login_result = self.samlExtensionPostLogin(configurationAttributes, find_user_by_uid) print "Saml. Authenticate for step 1. post_login_result:", post_login_result return post_login_result else: # Check if the is user with specified saml_user_uid print "Saml. Authenticate for step 1. Attempting to find user by uid:", saml_user_uid find_user_by_uid = userService.getUser(saml_user_uid) if (find_user_by_uid == None): print "Saml. Authenticate for step 1. Failed to find user" return False found_user_name = find_user_by_uid.getUserId() print "Saml. Authenticate for step 1. found_user_name:", found_user_name user_authenticated = authenticationService.authenticate(found_user_name) if (user_authenticated == False): print "Saml. Authenticate for step 1. Failed to authenticate user" return False print "Saml. Authenticate for step 1. Setting count steps to 1" context.set("saml_count_login_steps", 1) post_login_result = self.samlExtensionPostLogin(configurationAttributes, find_user_by_uid) print "Saml. Authenticate for step 1. post_login_result:", post_login_result return post_login_result elif (step == 2): print "Saml. Authenticate for step 2" sessionAttributes = context.get("sessionAttributes") if (sessionAttributes == None) or not sessionAttributes.containsKey("saml_user_uid"): print "Saml. Authenticate for step 2. 
saml_user_uid is empty" return False saml_user_uid = sessionAttributes.get("saml_user_uid") passed_step1 = StringHelper.isNotEmptyString(saml_user_uid) if (not passed_step1): return False credentials = Identity.instance().getCredentials() user_name = credentials.getUsername() user_password = credentials.getPassword() logged_in = False if (StringHelper.isNotEmptyString(user_name) and StringHelper.isNotEmptyString(user_password)): logged_in = userService.authenticate(user_name, user_password) if (not logged_in): return False # Check if there is user which has saml_user_uid # Avoid mapping Saml account to more than one IDP account find_user_by_uid = userService.getUserByAttribute("oxExternalUid", "saml:" + saml_user_uid) if (find_user_by_uid == None): # Add saml_user_uid to user one id UIDs find_user_by_uid = userService.addUserAttribute(user_name, "oxExternalUid", "saml:" + saml_user_uid) if (find_user_by_uid == None): print "Saml. Authenticate for step 2. Failed to update current user" return False post_login_result = self.samlExtensionPostLogin(configurationAttributes, find_user_by_uid) print "Saml. Authenticate for step 2. post_login_result:", post_login_result return post_login_result else: found_user_name = find_user_by_uid.getUserId() print "Saml. Authenticate for step 2. found_user_name:", found_user_name if StringHelper.equals(user_name, found_user_name): post_login_result = self.samlExtensionPostLogin(configurationAttributes, find_user_by_uid) print "Saml. Authenticate for step 2. post_login_result:", post_login_result return post_login_result return False else: return False
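In the "enroll" branch above, the incoming SAML attribute keys are lower-cased and copied onto the new user through the configured attributes mapping before the user is added. A short plain-Python sketch of that normalization and mapping; the sample attribute names in the usage comment are illustrative only.

# Sketch of the SAML "enroll" attribute mapping: normalise incoming keys to
# lower case, then copy mapped values onto the new user record.
def map_saml_attributes(saml_attributes, attributes_mapping):
    normalized = dict((key.lower(), value) for key, value in saml_attributes.items())
    new_user = {}
    for idp_attribute, local_attribute in attributes_mapping.items():
        value = normalized.get(idp_attribute)
        if value is not None:
            new_user[local_attribute] = value
    return new_user

# map_saml_attributes({"UID": "jdoe", "Mail": "j@example.org"},
#                     {"uid": "uid", "mail": "mail"})
# -> {"uid": "jdoe", "mail": "j@example.org"}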
def authenticate(self, configurationAttributes, requestParameters, step): context = Contexts.getEventContext() authenticationService = AuthenticationService.instance() userService = UserService.instance() saml_map_user = False saml_enroll_user = False saml_enroll_all_user_attr = False # Use saml_deployment_type only if there is no attributes mapping if (configurationAttributes.containsKey("saml_deployment_type")): saml_deployment_type = StringHelper.toLowerCase( configurationAttributes.get( "saml_deployment_type").getValue2()) if (StringHelper.equalsIgnoreCase(saml_deployment_type, "map")): saml_map_user = True if (StringHelper.equalsIgnoreCase(saml_deployment_type, "enroll")): saml_enroll_user = True if (StringHelper.equalsIgnoreCase(saml_deployment_type, "enroll_all_attr")): saml_enroll_all_user_attr = True saml_allow_basic_login = False if (configurationAttributes.containsKey("saml_allow_basic_login")): saml_allow_basic_login = StringHelper.toBoolean( configurationAttributes.get( "saml_allow_basic_login").getValue2(), False) use_basic_auth = False if (saml_allow_basic_login): # Detect if user used basic authnetication method credentials = Identity.instance().getCredentials() user_name = credentials.getUsername() user_password = credentials.getPassword() if (StringHelper.isNotEmpty(user_name) and StringHelper.isNotEmpty(user_password)): use_basic_auth = True if ((step == 1) and saml_allow_basic_login and use_basic_auth): print "Saml. Authenticate for step 1. Basic authentication" context.set("saml_count_login_steps", 1) credentials = Identity.instance().getCredentials() user_name = credentials.getUsername() user_password = credentials.getPassword() logged_in = False if (StringHelper.isNotEmptyString(user_name) and StringHelper.isNotEmptyString(user_password)): userService = UserService.instance() logged_in = userService.authenticate(user_name, user_password) if (not logged_in): return False return True if (step == 1): print "Saml. Authenticate for step 1" currentSamlConfiguration = self.getCurrentSamlConfiguration( self.samlConfiguration, configurationAttributes, requestParameters) if (currentSamlConfiguration == None): print "Saml. Prepare for step 1. Client saml configuration is invalid" return False saml_response_array = requestParameters.get("SAMLResponse") if ArrayHelper.isEmpty(saml_response_array): print "Saml. Authenticate for step 1. saml_response is empty" return False saml_response = saml_response_array[0] print "Saml. Authenticate for step 1. saml_response: '%s'" % saml_response samlResponse = Response(currentSamlConfiguration) samlResponse.loadXmlFromBase64(saml_response) saml_validate_response = True if (configurationAttributes.containsKey("saml_validate_response")): saml_validate_response = StringHelper.toBoolean( configurationAttributes.get( "saml_validate_response").getValue2(), False) if (saml_validate_response): if (not samlResponse.isValid()): print "Saml. Authenticate for step 1. saml_response isn't valid" saml_response_name_id = samlResponse.getNameId() if (StringHelper.isEmpty(saml_response_name_id)): print "Saml. Authenticate for step 1. saml_response_name_id is invalid" return False print "Saml. Authenticate for step 1. saml_response_name_id: '%s'" % saml_response_name_id saml_response_attributes = samlResponse.getAttributes() print "Saml. Authenticate for step 1. attributes: '%s'" % saml_response_attributes # Use persistent Id as saml_user_uid saml_user_uid = saml_response_name_id if (saml_map_user): # Use mapping to local IDP user print "Saml. 
Authenticate for step 1. Attempting to find user by oxExternalUid: saml: '%s'" % saml_user_uid # Check if the is user with specified saml_user_uid find_user_by_uid = userService.getUserByAttribute( "oxExternalUid", "saml:" + saml_user_uid) if (find_user_by_uid == None): print "Saml. Authenticate for step 1. Failed to find user" print "Saml. Authenticate for step 1. Setting count steps to 2" context.set("saml_count_login_steps", 2) context.set("saml_user_uid", saml_user_uid) return True found_user_name = find_user_by_uid.getUserId() print "Saml. Authenticate for step 1. found_user_name: '%s'" % found_user_name user_authenticated = authenticationService.authenticate( found_user_name) if (user_authenticated == False): print "Saml. Authenticate for step 1. Failed to authenticate user" return False print "Saml. Authenticate for step 1. Setting count steps to 1" context.set("saml_count_login_steps", 1) post_login_result = self.samlExtensionPostLogin( configurationAttributes, find_user_by_uid) print "Saml. Authenticate for step 1. post_login_result: '%s'" % post_login_result return post_login_result elif (saml_enroll_user): # Use auto enrollment to local IDP print "Saml. Authenticate for step 1. Attempting to find user by oxExternalUid: saml: '%s'" % saml_user_uid # Check if the is user with specified saml_user_uid find_user_by_uid = userService.getUserByAttribute( "oxExternalUid", "saml:" + saml_user_uid) if (find_user_by_uid == None): # Auto user enrollemnt print "Saml. Authenticate for step 1. There is no user in LDAP. Adding user to local LDAP" # Convert saml result attributes keys to lover case saml_response_normalized_attributes = HashMap() for saml_response_attribute_entry in saml_response_attributes.entrySet( ): saml_response_normalized_attributes.put( StringHelper.toLowerCase( saml_response_attribute_entry.getKey()), saml_response_attribute_entry.getValue()) currentAttributesMapping = self.prepareCurrentAttributesMapping( self.attributesMapping, configurationAttributes, requestParameters) print "Saml. Authenticate for step 1. Using next attributes mapping '%s'" % currentAttributesMapping newUser = User() # Set custom object classes if self.userObjectClasses != None: print "Saml. Authenticate for step 1. User custom objectClasses to add persons: '%s'" % Util.array2ArrayList( self.userObjectClasses) newUser.setCustomObjectClasses(self.userObjectClasses) for attributesMappingEntry in currentAttributesMapping.entrySet( ): idpAttribute = attributesMappingEntry.getKey() localAttribute = attributesMappingEntry.getValue() if self.debugEnrollment: print "Saml. Authenticate for step 1. Trying to map '%s' into '%s'" % ( idpAttribute, localAttribute) localAttributeValue = saml_response_normalized_attributes.get( idpAttribute) if (localAttributeValue != None): if self.debugEnrollment: print "Saml. Authenticate for step 1. Setting attribute '%s' value '%s'" % ( localAttribute, localAttributeValue) newUser.setAttribute(localAttribute, localAttributeValue) newUser.setAttribute("oxExternalUid", "saml:" + saml_user_uid) print "Saml. Authenticate for step 1. Attempting to add user '%s' with next attributes: '%s'" % ( saml_user_uid, newUser.getCustomAttributes()) user_unique = self.checkUserUniqueness(newUser) if not user_unique: print "Saml. Authenticate for step 1. Failed to add user: '******'. User not unique" % newUser.getAttribute( "uid") facesMessages = FacesMessages.instance() facesMessages.add( StatusMessage.Severity.ERROR, "Failed to enroll. 
User with same key attributes exist already" ) FacesContext.getCurrentInstance().getExternalContext( ).getFlash().setKeepMessages(True) return False find_user_by_uid = userService.addUser(newUser, True) print "Saml. Authenticate for step 1. Added new user with UID: '%s'" % find_user_by_uid.getUserId( ) found_user_name = find_user_by_uid.getUserId() print "Saml. Authenticate for step 1. found_user_name: '%s'" % found_user_name user_authenticated = authenticationService.authenticate( found_user_name) if (user_authenticated == False): print "Saml. Authenticate for step 1. Failed to authenticate user: '******'" % found_user_name return False print "Saml. Authenticate for step 1. Setting count steps to 1" context.set("saml_count_login_steps", 1) post_login_result = self.samlExtensionPostLogin( configurationAttributes, find_user_by_uid) print "Saml. Authenticate for step 1. post_login_result: '%s'" % post_login_result return post_login_result elif (saml_enroll_all_user_attr): print "Saml. Authenticate for step 1. Attempting to find user by oxExternalUid: saml:" + saml_user_uid # Check if the is user with specified saml_user_uid find_user_by_uid = userService.getUserByAttribute( "oxExternalUid", "saml:" + saml_user_uid) if (find_user_by_uid == None): print "Saml. Authenticate for step 1. Failed to find user" user = User() # Set custom object classes if self.userObjectClasses != None: print "Saml. Authenticate for step 1. User custom objectClasses to add persons: '%s'" % Util.array2ArrayList( self.userObjectClasses) user.setCustomObjectClasses(self.userObjectClasses) customAttributes = ArrayList() for key in saml_response_attributes.keySet(): ldapAttributes = attributeService.getAllAttributes() for ldapAttribute in ldapAttributes: saml2Uri = ldapAttribute.getSaml2Uri() if (saml2Uri == None): saml2Uri = attributeService.getDefaultSaml2Uri( ldapAttribute.getName()) if (saml2Uri == key): attribute = CustomAttribute( ldapAttribute.getName()) attribute.setValues(attributes.get(key)) customAttributes.add(attribute) attribute = CustomAttribute("oxExternalUid") attribute.setValue("saml:" + saml_user_uid) customAttributes.add(attribute) user.setCustomAttributes(customAttributes) if (user.getAttribute("sn") == None): attribute = CustomAttribute("sn") attribute.setValue(saml_user_uid) customAttributes.add(attribute) if (user.getAttribute("cn") == None): attribute = CustomAttribute("cn") attribute.setValue(saml_user_uid) customAttributes.add(attribute) user_unique = self.checkUserUniqueness(user) if not user_unique: print "Saml. Authenticate for step 1. Failed to add user: '******'. User not unique" % newUser.getAttribute( "uid") facesMessages = FacesMessages.instance() facesMessages.add( StatusMessage.Severity.ERROR, "Failed to enroll. User with same key attributes exist already" ) FacesContext.getCurrentInstance().getExternalContext( ).getFlash().setKeepMessages(True) return False find_user_by_uid = userService.addUser(user, True) print "Saml. Authenticate for step 1. Added new user with UID: '%s'" % find_user_by_uid.getUserId( ) found_user_name = find_user_by_uid.getUserId() print "Saml. Authenticate for step 1. found_user_name: '%s'" % found_user_name user_authenticated = authenticationService.authenticate( found_user_name) if (user_authenticated == False): print "Saml. Authenticate for step 1. Failed to authenticate user" return False print "Saml. Authenticate for step 1. 
Setting count steps to 1" context.set("saml_count_login_steps", 1) post_login_result = self.samlExtensionPostLogin( configurationAttributes, find_user_by_uid) print "Saml. Authenticate for step 1. post_login_result: '%s'" % post_login_result return post_login_result else: # Check if the is user with specified saml_user_uid print "Saml. Authenticate for step 1. Attempting to find user by uid: '%s'" % saml_user_uid find_user_by_uid = userService.getUser(saml_user_uid) if (find_user_by_uid == None): print "Saml. Authenticate for step 1. Failed to find user" return False found_user_name = find_user_by_uid.getUserId() print "Saml. Authenticate for step 1. found_user_name: '%s'" % found_user_name user_authenticated = authenticationService.authenticate( found_user_name) if (user_authenticated == False): print "Saml. Authenticate for step 1. Failed to authenticate user" return False print "Saml. Authenticate for step 1. Setting count steps to 1" context.set("saml_count_login_steps", 1) post_login_result = self.samlExtensionPostLogin( configurationAttributes, find_user_by_uid) print "Saml. Authenticate for step 1. post_login_result: '%s'" % post_login_result return post_login_result elif (step == 2): print "Saml. Authenticate for step 2" sessionAttributes = context.get("sessionAttributes") if (sessionAttributes == None ) or not sessionAttributes.containsKey("saml_user_uid"): print "Saml. Authenticate for step 2. saml_user_uid is empty" return False saml_user_uid = sessionAttributes.get("saml_user_uid") passed_step1 = StringHelper.isNotEmptyString(saml_user_uid) if (not passed_step1): return False credentials = Identity.instance().getCredentials() user_name = credentials.getUsername() user_password = credentials.getPassword() logged_in = False if (StringHelper.isNotEmptyString(user_name) and StringHelper.isNotEmptyString(user_password)): logged_in = userService.authenticate(user_name, user_password) if (not logged_in): return False # Check if there is user which has saml_user_uid # Avoid mapping Saml account to more than one IDP account find_user_by_uid = userService.getUserByAttribute( "oxExternalUid", "saml:" + saml_user_uid) if (find_user_by_uid == None): # Add saml_user_uid to user one id UIDs find_user_by_uid = userService.addUserAttribute( user_name, "oxExternalUid", "saml:" + saml_user_uid) if (find_user_by_uid == None): print "Saml. Authenticate for step 2. Failed to update current user" return False post_login_result = self.samlExtensionPostLogin( configurationAttributes, find_user_by_uid) print "Saml. Authenticate for step 2. post_login_result: '%s'" % post_login_result return post_login_result else: found_user_name = find_user_by_uid.getUserId() print "Saml. Authenticate for step 2. found_user_name: '%s'" % found_user_name if StringHelper.equals(user_name, found_user_name): post_login_result = self.samlExtensionPostLogin( configurationAttributes, find_user_by_uid) print "Saml. Authenticate for step 2. post_login_result: '%s'" % post_login_result return post_login_result return False else: return False
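The "enroll_all_attr" branch resolves each incoming SAML attribute to a local LDAP attribute by matching the attribute's SAML2 URI before storing the values on the new user. A minimal plain-Python sketch of that lookup; the URI/attribute pairs in the usage comment are examples, not the deployment's actual schema.

# Sketch of the "enroll_all_attr" resolution: match each SAML attribute key
# against the SAML2 URI registered for the LDAP attributes and keep the
# values under the matching LDAP attribute name.
def resolve_by_saml2_uri(saml_attributes, ldap_attributes):
    # ldap_attributes: list of (ldap_name, saml2_uri) pairs
    resolved = {}
    for key, values in saml_attributes.items():
        for ldap_name, saml2_uri in ldap_attributes:
            if saml2_uri == key:
                resolved[ldap_name] = values
    return resolved

# resolve_by_saml2_uri({"urn:oid:0.9.2342.19200300.100.1.3": ["j@example.org"]},
#                      [("mail", "urn:oid:0.9.2342.19200300.100.1.3")])
# -> {"mail": ["j@example.org"]}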
def applications(client, systemOSH, databasesOSH, OSHVResult, schemaName=None, viewSchemaName=None): #query information about Oracle Applications products at your site mapIdToOSH = HashMap() objs = __assocWithSchemaName( ['FND_PRODUCT_INSTALLATIONS', 'FND_APPLICATION_VL'], schemaName, viewSchemaName) resultSet = client.executeQuery( 'SELECT * FROM %s P,%s V WHERE V.APPLICATION_ID = P.APPLICATION_ID' % objs) #@@CMD_PERMISION sql protocol execution while resultSet.next(): id = resultSet.getString(1) version = resultSet.getString(8) status = resultSet.getString(9) tablespace = resultSet.getString(11) indexTablespace = resultSet.getString(12) tempTablespace = resultSet.getString(13) sizing = resultSet.getString(14) patchSet = resultSet.getString(17) shortName = resultSet.getString(20) basePath = resultSet.getString(26) description = resultSet.getString(27) if patchSet == None: patchSet = '' if logger.isDebugEnabled(): logger.debug('-------------------------------------------------') logger.debug('id = ', id) logger.debug('version = ', version) logger.debug('status = ', status) logger.debug('tablespace = ', tablespace) logger.debug('indexTablespace = ', indexTablespace) logger.debug('tempTablespace = ', tempTablespace) logger.debug('sizing = ', sizing) logger.debug('patchSet = ', patchSet) logger.debug('shortName = ', shortName) logger.debug('basepath = ', basePath) logger.debug('description = ', description) logger.debug('-------------------------------------------------') appOSH = ObjectStateHolder('oracleapplication') appOSH.setAttribute('data_name', id) if version != None: appOSH.setAttribute('oracleapplication_version', version) appOSH.setAttribute('oracleapplication_status', getStatusString(status)) if tablespace != None: appOSH.setAttribute('oracleapplication_tablespace', tablespace) if indexTablespace != None: appOSH.setAttribute('oracleapplication_indextablespace', indexTablespace) if tempTablespace != None: appOSH.setAttribute('oracleapplication_temptablespace', tempTablespace) if sizing != None: appOSH.setAttribute('oracleapplication_sizing', int(sizing)) if patchSet != None: appOSH.setAttribute('oracleapplication_patchset', patchSet) if shortName != None: appOSH.setAttribute('oracleapplication_shortname', shortName) if basePath != None: appOSH.setAttribute('oracleapplication_basepath', basePath) if description != None: appOSH.setAttribute('oracleapplication_description', description) appOSH.setContainer(systemOSH) OSHVResult.add(appOSH) mapIdToOSH.put(id, appOSH) if databasesOSH != None: linkTablespace(appOSH, databasesOSH, tablespace, OSHVResult) linkTablespace(appOSH, databasesOSH, indexTablespace, OSHVResult) linkTablespace(appOSH, databasesOSH, tempTablespace, OSHVResult) # build application dependencies resultSet.close() objs = __assocWithSchemaName(['FND_PRODUCT_DEPENDENCIES'], schemaName, viewSchemaName) resultSet = client.executeQuery( 'SELECT APPLICATION_ID,REQUIRED_APPLICATION_ID FROM %s' % objs) #@@CMD_PERMISION sql protocol execution while resultSet.next(): id = resultSet.getString(1) requiredId = resultSet.getString(2) appOSH = mapIdToOSH.get(id) requiredAppOSH = mapIdToOSH.get(requiredId) if appOSH != None and requiredAppOSH != None: dependOSH = modeling.createLinkOSH('depend', appOSH, requiredAppOSH) OSHVResult.add(dependOSH) else: logger.debug('Applications for ids [', id, '] and/or [', requiredId, '] are not found') resultSet.close() return mapIdToOSH
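applications() works in two passes: the first query builds a map from application id to the created ObjectStateHolder, and the second turns FND_PRODUCT_DEPENDENCIES rows into depend links using that map, skipping rows where either side was not discovered. A short plain-Python sketch of that two-pass pattern; the row shapes are illustrative stand-ins for the JDBC result sets.

# Sketch of the two-pass pattern in applications(): build an id -> object
# map first, then turn (id, required_id) rows into dependency links.
def build_dependency_links(application_rows, dependency_rows):
    id_to_app = dict((row["id"], row) for row in application_rows)
    links = []
    for app_id, required_id in dependency_rows:
        app = id_to_app.get(app_id)
        required = id_to_app.get(required_id)
        if app is not None and required is not None:
            links.append((app_id, required_id))
        # rows referring to undiscovered applications are skipped, as above
    return links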
def services(client, mapAppIdToOSH, hostToServerOSH, nameToHostOSH, systemOSH, OSHVResult, schemaName=None, viewSchemaName=None): mapManagerToOSH = HashMap() mapServiceToOSH = HashMap() #we have no way for now to obtain processes names, and pid is not interesting for us #so we don't want to create processes (and can't, since process name is key_attribute) #mapProcessIDToOSH = getProcesses(client,nameToHostOSH, OSHVResult) mapProcessIDToOSH = HashMap() objs = __assocWithSchemaName( ['FND_CONCURRENT_QUEUES_VL', 'FND_CP_SERVICES_VL'], schemaName, viewSchemaName) resultSet = client.executeQuery( 'SELECT Q.CONCURRENT_QUEUE_ID,Q.APPLICATION_ID,Q.CONCURRENT_QUEUE_NAME,Q.MAX_PROCESSES,Q.RUNNING_PROCESSES,Q.TARGET_NODE,Q.USER_CONCURRENT_QUEUE_NAME,Q.DESCRIPTION,S.SERVICE_NAME,S.DESCRIPTION FROM %s Q, %s S WHERE Q.MANAGER_TYPE = S.SERVICE_ID' % objs) #@@CMD_PERMISION sql protocol execution while resultSet.next(): queueId = resultSet.getString(1) appId = resultSet.getString(2) name = resultSet.getString(3) maxProcesses = resultSet.getString(4) runningProcesses = resultSet.getString(5) host = resultSet.getString(6) displayName = resultSet.getString(7) description = resultSet.getString(8) managerName = resultSet.getString(9) managerDescription = resultSet.getString(10) if managerName == None: continue managerOSH = mapManagerToOSH.get(managerName) if managerOSH == None: managerOSH = ObjectStateHolder('oracleappservicemanager') managerOSH.setAttribute('data_name', managerName) if managerDescription != None: managerOSH.setAttribute('oracleappservicemanager_description', managerDescription) managerOSH.setContainer(systemOSH) mapManagerToOSH.put(managerName, managerOSH) OSHVResult.add(managerOSH) if description == None: description = '' if logger.isDebugEnabled(): logger.debug('-------------------------------------------------') logger.debug('name = ', name) logger.debug('displayName = ', displayName) logger.debug('appId = ', appId) logger.debug('description = ', description) if host != None: logger.debug('host = ', host) logger.debug('-------------------------------------------------') appOSH = mapAppIdToOSH.get(appId) serverOSH = hostToServerOSH.get(host) if appOSH != None: if (name == None) and (displayName != None): name = displayName if name != None: serviceOSH = ObjectStateHolder('oracleappservice') serviceOSH.setAttribute('data_name', name) serviceOSH.setAttribute('oracleappservice_displayname', displayName) serviceOSH.setAttribute('oracleappservice_description', description) serviceOSH.setAttribute('oracleappservice_maxprocesses', int(maxProcesses)) serviceOSH.setAttribute('oracleappservice_runningprocesses', int(runningProcesses)) serviceOSH.setContainer(appOSH) OSHVResult.add(serviceOSH) mapServiceToOSH.put(name, serviceOSH) processes = mapProcessIDToOSH.get(queueId) if processes != None: logger.debug('Found processes for service [', name, ']') itProcesses = processes.iterator() while itProcesses.hasNext(): processOSH = itProcesses.next() resourceOSH = modeling.createLinkOSH( 'resource', serviceOSH, processOSH) OSHVResult.add(resourceOSH) else: logger.debug('No processes found for service [', name, ']') if managerOSH != None: memberOSH = modeling.createLinkOSH('member', managerOSH, serviceOSH) OSHVResult.add(memberOSH) if serverOSH != None: deployedOSH = modeling.createLinkOSH( 'deployed', serverOSH, serviceOSH) OSHVResult.add(deployedOSH) else: logger.debug('Server not found for host [', host, ']') resultSet.close()
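services() creates each oracleappservicemanager lazily, the first time a queue row references that manager name, and reuses it for every later row. A minimal get-or-create sketch of that pattern in plain Python; the manager dict and the sample name are illustrative.

# Sketch of the lazy get-or-create used for service managers in services():
# one manager object per distinct manager name, created on first use.
def get_or_create_manager(managers, manager_name, description=None):
    manager = managers.get(manager_name)
    if manager is None:
        manager = {"name": manager_name, "description": description}
        managers[manager_name] = manager
    return manager

managers = {}
get_or_create_manager(managers, "Internal Concurrent Manager")
get_or_create_manager(managers, "Internal Concurrent Manager")  # reuses the first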