Code example #1
File: targets.py  Project: lbjay/montysolr
def get_recids_changes(message):
    """Retrieves the recids of the last changed documents"""
    last_recid = None
    table = 'bibrec'
    if message.getParam("table"):
        table = str(message.getParam("table"))
    if message.getParam("last_recid"):
        #last_recid = int(Integer.cast_(message.getParam("last_recid")).intValue())
        last_recid = int(str(message.getParam("last_recid")))
    mod_date = None
    if message.getParam("mod_date"):
        mod_date = str(message.getParam("mod_date"))
    max_records = 10000
    if message.getParam('max_records'):
        max_records = int(Integer.cast_(message.getParam("max_records")).intValue())
    if last_recid and last_recid == -1:
        mod_date = None
    (wid, results) = api_calls.dispatch("get_recids_changes", last_recid, max_recs=max_records, 
                                        mod_date=mod_date, table=table)
    if results:
        data, last_recid, mod_date = results
        out = HashMap() #.of_(String, JArray_int)
        for k,v in data.items():
            out.put(k, JArray_int(v))
        message.setResults(out)
        message.setParam('mod_date', mod_date)
        message.setParam('last_recid', last_recid)
Code example #2
def save_and_get_complex():
	'''Test saving entities containing mapped collection properties'''
	entity1 = TestEntities.ComplexEntity()
	entity1.setId("complex1")
	strings = ArrayList()
	strings.add("one")
	strings.add("two")
	entity1.setStringList(strings)
	ints = HashSet()
	ints.add(1)
	ints.add(2)
	entity1.setIntSet(ints)
	extended = HashMap()
	extended.put("prop1", "one")
	extended.put("prop2", "two")
	entity1.setExtendedProps(extended)
	
	service = EntityService(TestEntities.ComplexEntity)
	service.save(entity1)
	
	entity2 = service.get("complex1")
	assertNotNull(entity2)
	assertEquals(entity2.getId(), entity1.getId())
	assertTrue(entity2.getStringList().contains("one"))
	assertTrue(entity2.getStringList().contains("two"))
	assertTrue(entity2.getIntSet().contains(java.lang.Long(1)))
	assertTrue(entity2.getIntSet().contains(java.lang.Long(2)))
	assertNotNull(entity2.getExtendedProps())
	assertEquals(entity2.getExtendedProps().get("prop1"), "one")
	assertEquals(entity2.getExtendedProps().get("prop2"), "two")
Code example #3
File: SqlServerConfig.py  Project: ddonnelly19/dd-git
 def discoverPlans(self,oshv,sqlServerId,dbs):
     logger.debug("going to get jobs and plans")
     if self.discoveryOptions and self.discoveryOptions.discoverSqlJob:
         jobById=self.getSqlJobs(oshv, sqlServerId)
     else:
         jobById=HashMap()
     rs = self.connection.getTable(self.plansQuery)
     plans = HashMap()
     while(rs.next()):
         name = rs.getString('plan_name')
         id = rs.getString('plan_id')
         osh = ObjectStateHolder('sqlservermaintenanceplan')
         osh.setAttribute(Queries.DATA_NAME,name)
         osh.setAttribute('planId',id)
         osh.setContainer(sqlServerId)
         oshv.add(osh)
         if self.discoveryOptions and self.discoveryOptions.discoverDbUser:
             owner = rs.getString('owner')
             # Some plans may not have an owner so we need to check
             if owner:
                 user = ObjectStateHolder('dbuser')
                 user.setAttribute(Queries.DATA_NAME,owner)
                 user.setContainer(sqlServerId)
                 oshv.add(user)
                 oshv.add(modeling.createLinkOSH('owner',user,osh))
         plans.put(name,osh)
     rs.close()
     logger.debug("got plans: ", plans.keySet().toString())
     self.discoverPlanJobs(oshv,sqlServerId,plans,jobById)
     self.discoverPlanDbs(oshv,plans,dbs)
Code example #4
def info(dspath):

    # get datastore
    shp = "file://%s" % os.path.abspath(dspath)
    params = HashMap()
    params.put("url", net.URL(shp))
    dataStore = DataStoreFinder.getDataStore(params)

    typeName = dataStore.getTypeNames()[0]
    featureSource = dataStore.getFeatureSource(typeName)
    featureCollection = featureSource.getFeatures()
    featureType = featureSource.getSchema()

    # Print out feature source info (ie layer info)
    #'getBounds', 'getClass', 'getCount', 'getDataStore'
    print "Datastore         : ", featureSource.getDataStore()
    print "Layer Name        : ", typeName
    print "Number of features: ", featureCollection.count
    print "Bounding Box      : ", featureSource.getBounds()

    # Print out feature attribute types
    for atype in featureType.getAttributeTypes():
        print atype.getName(), atype.getType()

    return None  # NOTE: this early return leaves the feature-iteration block below unreachable

    f_iter = featureCollection.iterator()
    while f_iter.hasNext():
        feat = f_iter.next()
        print "==============", feat.getID()
        for i in range(feat.getNumberOfAttributes()):
            print "\t %s " % feat.getAttribute(i)

    featureCollection.close(f_iter)
Code example #5
File: indexers.py  Project: coady/lupyne
 def parse(self, query, field="", op="", version=None, parser=None, **attrs):
     """Return parsed lucene Query.
     
     :param query: query string
     :param field: default query field name, sequence of names, or boost mapping
     :param op: default query operator ('or', 'and')
     :param version: lucene Version
     :param parser: custom PythonQueryParser class
     :param attrs: additional attributes to set on the parser
     """
     # parsers aren't thread-safe (nor slow), so create one each time
     cls = (
         queryparser.classic.QueryParser
         if isinstance(field, basestring)
         else queryparser.classic.MultiFieldQueryParser
     )
     args = field, self
     if isinstance(field, collections.Mapping):
         boosts = HashMap()
         for key in field:
             boosts.put(key, Float(field[key]))
         args = list(field), self, boosts
     parser = (parser or cls)(version or util.Version.LATEST, *args)
     if op:
         parser.defaultOperator = getattr(queryparser.classic.QueryParser.Operator, op.upper())
     for name, value in attrs.items():
         setattr(parser, name, value)
     if isinstance(parser, queryparser.classic.MultiFieldQueryParser):
         return parser.parse(parser, query)
     try:
         return parser.parse(query)
     finally:
         if isinstance(parser, PythonQueryParser):
             parser.finalize()
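
Note: a minimal usage sketch for the method above; the `indexer` instance, field names, and boosts are assumptions for illustration only, not part of lupyne's snippet.

# Hypothetical calls showing how the keyword arguments map onto the Lucene query parsers.
q1 = indexer.parse("hello world", field="content", op="and")        # classic QueryParser
q2 = indexer.parse("hello world", field=["title", "content"])       # MultiFieldQueryParser over several fields
q3 = indexer.parse("hello", field={"title": 8.0, "content": 1.0})   # boost mapping converted to a HashMap of Floats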
Code example #6
File: organiser.py  Project: Deakin/the-fascinator
 def __getRvtManifest(self, manifest):
     rvtMap = HashMap()
     rvtMap.put("title", manifest.getTitle())
     rvtMap.put("toc", self.__getRvtNodes(manifest.getTopNodes()))
     json = JsonObject(rvtMap)
     #print json.toString()
     return json.toString()
Code example #7
File: loadConfig.py  Project: KeithLatteri/awips2
def getGlobals(mod):
    mp = HashMap()
    for attrName in mod.__dict__:        
        if not attrName.startswith('__'):
            attr = mod.__getattribute__(attrName)
            t = type(attr)
            if t is not list:
                if t is str:
                    mp.put(attrName, attr)
                elif t is int:
                    mp.put(attrName, Integer(attr))
                elif t is float:
                    mp.put(attrName, Float(attr))
                elif t is bool:
                    mp.put(attrName, Boolean(attr))
            else:
                arr = None                
                if len(attr) > 0:
                    t = type(attr[0])
                    if t is int:        
                        arr = __fillArray(attr, Integer)
                    elif t is float:
                        arr = __fillArray(attr, Float)
                    elif t is str:                        
                        arr = __fillArray(attr, String)
                mp.put(attrName, arr)
    return mp
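
Note: the private helper `__fillArray` is not part of this snippet. A minimal sketch of what it presumably does, assuming Jython's `jarray` module and the `java.lang` wrapper types, is shown below; the real AWIPS implementation may differ.

from jarray import array
from java.lang import Object

def __fillArray(values, jtype):
    # Assumed reconstruction: box each Python value with the given Java wrapper type
    # (Integer, Float, String, ...) and return a Java Object[] array for the HashMap.
    return array([jtype(v) for v in values], Object)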
Code example #8
File: mainLib.py  Project: PhilMarsh/Hemskies-App
	def setBuddyVariables(self, user, varDict):
		hashMap = HashMap()
		
		for key, value in varDict.items():
			hashMap.put(key, value)
		
		self._helper.setBuddyVariables( user, hashMap )
Code example #9
File: organiser.py  Project: Deakin/the-fascinator
 def __getRvtNodes(self, manifest):
     rvtNodes = ArrayList()
     #print "manifest=%s" % manifest
     for node in manifest:
         package = False
         try:
             # add the node
             rvtNode = HashMap()
             if not node.getHidden():
                 oid = node.getId()
                 # check if node is a package
                 if oid != "blank":
                     package = (self.__getContentType(oid) == "application/x-fascinator-package")
                 else:
                     oid = node.getKey().replace("node", "blank")
                 rvtNode.put("visible", True)
                 rvtNode.put("title", node.getTitle())
                 if package:
                     subManifest = self.__readManifest(oid)
                     if subManifest is not None:
                         rvtNode.put("children", self.__getRvtNodes(subManifest.getTopNodes()))
                     oid = node.getKey().replace("node", "package")
                 else:
                     rvtNode.put("children", self.__getRvtNodes(node.getChildren()))
                 rvtNode.put("relPath", oid)
                 rvtNodes.add(rvtNode)
         except Exception, e:
             self.log.error("Failed to process node '%s': '%s'" % (node.toString(), str(e)))
Code example #10
File: basalganglia.py  Project: Elhamahm/nengo_1.4
def make(net,name='Basal Ganglia', dimensions=1, neurons=100, pstc=0.01, netbg=None, same_neurons=True, tau_ampa=0.002, tau_gaba=0.008, radius=1.5):

    if netbg is None:
        netbg=nef.Network(name)
    input=netbg.make('input',1,dimensions,quick=True,mode='direct')
    output=netbg.make('output',1,dimensions,quick=True,mode='direct')
    nps.basalganglia.make_basal_ganglia(netbg,input,output, dimensions=dimensions, neurons=neurons, same_neurons=same_neurons, tau_ampa=tau_ampa, tau_gaba=tau_gaba, radius=radius)

    input.addDecodedTermination('input',numeric.eye(dimensions),pstc,False)
    netbg.network.exposeTermination(input.getTermination('input'),'input')
    netbg.network.exposeOrigin(output.getOrigin('X'),'output')
    
    if net is not None:
        net.add(netbg.network)
    
        if net.network.getMetaData("BasalGanglia") == None:
            net.network.setMetaData("BasalGanglia", HashMap())
        bgs = net.network.getMetaData("BasalGanglia")

        bg=HashMap(5)
        bg.put("name", name)
        bg.put("dimensions", dimensions)
        bg.put("neurons", neurons)
        bg.put("pstc", pstc)
        bg.put("same_neurons", same_neurons)

        bgs.put(name, bg)

        if net.network.getMetaData("templates") == None:
            net.network.setMetaData("templates", ArrayList())
        templates = net.network.getMetaData("templates")
        templates.add(name)

    return netbg
Code example #11
File: json_ims.py  Project: kiranba/the-fascinator
 def __getJson(self):
     rvtMap = HashMap()
     try:
         oid = formData.get("oid")
         object = Services.storage.getObject(oid)
         payload = object.getPayload("imsmanifest.xml")
         try:
             from xml.etree import ElementTree
             xmlStr = IOUtils.toString(payload.open(), "UTF-8")
             payload.close()
             xml = ElementTree.XML(xmlStr.encode("UTF-8"))
             ns = xml.tag[:xml.tag.find("}")+1]
             resources = {}
             for res in xml.findall(ns+"resources/"+ns+"resource"):
                 resources[res.attrib.get("identifier")] = res.attrib.get("href")
             organizations = xml.find(ns+"organizations")
             defaultName = organizations.attrib.get("default")
             organizations = organizations.findall(ns+"organization")
             organizations = [o for o in organizations if o.attrib.get("identifier")==defaultName]
             organization = organizations[0]
             title = organization.find(ns+"title").text
             rvtMap.put("title", title)
             items = organization.findall(ns+"item")
             rvtMap.put("toc", self.__getJsonItems(ns, items, resources))
         except Exception, e:
              data["error"] = "Error - %s" % str(e)
              print data["error"]
         object.close()
Code example #12
def getUduid(client):
    OPTION_UD_UNIQUE_ID = "UD_UNIQUE_ID"
    try:
        uduid = None
        try:
            clientOptions = client.getOptionsMap()
            uduid = clientOptions.get(OPTION_UD_UNIQUE_ID)
            logger.debug("Get uduid from client:", uduid)
        except:
            logger.debug("Can't get uduid from client")
            pass

        if not uduid:
            from java.util import UUID
            uduid = UUID.randomUUID()
            logger.debug("Generated uduid:", uduid)

            from java.util import HashMap
            options = HashMap()
            options.put(OPTION_UD_UNIQUE_ID, str(uduid))
            client.setOptionsMap(options)
            clientOptions = client.getOptionsMap()
            #Get the value again to make sure the new value was set to client
            uduid = clientOptions.get(OPTION_UD_UNIQUE_ID)

        logger.debug("Final value of uduid:", uduid)
        return uduid
    except:
        return None
Code example #13
File: settings.py  Project: kiranba/the-fascinator
 def getFacetDisplays(self):
     facetDisplays = self.__portal.getMap("portal/facet-displays")
     if facetDisplays is None or facetDisplays.isEmpty():
         facetDisplays = HashMap()
         facetDisplays.put("list", "List menu")
         facetDisplays.put("tree", "Dynamic tree")
     return facetDisplays
Code example #14
    def updateLocalRecordRelations(self, jobItems):
        oidIdentifierMap = HashMap()
        for jobItem in jobItems:
            oidIdentifierMap.put(jobItem.get("oid"),jobItem.get("required_identifiers")[0].get("identifier"))
            
        for jobItem in jobItems:
            type = jobItem.get("type");
            targetSystem = self.systemConfig.getString(None, "curation", "supported-types", type);
            if targetSystem == "redbox":
                oid = jobItem.get("oid")
                digitalObject = StorageUtils.getDigitalObject(self.services.getStorage(), oid)
                tfPackagePid = self.getPackageData(digitalObject)
                metadataJsonPayload = digitalObject.getPayload(tfPackagePid)
                metadataJsonInstream = metadataJsonPayload.open()
                metadataJson = JsonSimple(metadataJsonInstream)
                metadataJsonPayload.close()
                relationships = metadataJson.getArray("relationships")
                if relationships is not None:
                    for relationship in relationships:
                        system = relationship.get("system")
                        if system != "redbox" or system != None:
                            url = self.systemConfig.getString("can't find it", "curation","external-system-urls","get-oid-for-identifier",system)
                            client = BasicHttpClient(url+ "&identifier="+relationship.get("identifier"))
                            get = GetMethod(url+ "&identifier="+relationship.get("identifier"))
                            client.executeMethod(get)
                            if get.getStatusCode() == 200:
                                response = JsonSimple(get.getResponseBodyAsString())
                                relationship.put("curatedPid",oidIdentifierMap.get(response.getString(None,"oid")))
                                relationship.put("isCurated",True)
                            
                            #Now update the relationship on Mint's side
                            break
                    
        istream = ByteArrayInputStream(String(metadataJson.toString(True)).getBytes())
        StorageUtils.createOrUpdatePayload(digitalObject,tfPackagePid,istream)
Code example #15
 def getPublishersFromDistributor(self,oshv,distributor, distributorDatabaseName,sqlServerId):
     #check if i am a distributor first
     rs = self.connection.doCall('exec sp_helpdistpublisher')
     publishers = HashMap()
     sqlServers = HashMap()
     while(rs.next()):
         publisherName = rs.getString('name')
         publisher = ObjectStateHolder('sqlserverpublisher')
         sqlServer = self.createSqlServer(publisherName,oshv,sqlServerId)
         publisher.setContainer(sqlServer)
         publisher.setAttribute(Queries.DATA_NAME,publisherName)
         publishers.put(publisherName,publisher)
         sqlServers.put(publisherName,sqlServer)
         oshv.add(sqlServer)
         oshv.add(publisher)
         oshv.add(modeling.createLinkOSH('dblink',publisher,distributor))
         #add the dblink between the distributor and the publisher                                    
     rs.close()
     if(publishers.size() == 0):
         return
     #for each publisher get the published dbs
     workingDatabase = self.connection.getWorkingDatabase()
     self.connection.setWorkingDatabase(distributorDatabaseName)
     itr = publishers.keySet().iterator()
     while (itr.hasNext()):
         publisherName = itr.next()
         publisher = publishers.get(publisherName)
         sqlServer = sqlServers.get(publisherName)
         self.getPublications(publisherName,sqlServer,publisher,oshv,sqlServerId)
             
     self.connection.setWorkingDatabase(workingDatabase)
Code example #16
File: GameFlow.py  Project: hobson/ggpy
 def solveTurns(self, model):
     """ generated source for method solveTurns """
     # Before we can do anything else, we need a topological ordering on our forms
     ordering = getTopologicalOrdering(model.getIndependentSentenceForms(), model.getDependencyGraph())
     ordering.retainAll(self.formsControlledByFlow)
     # Let's add function info to the consideration...
     functionInfoMap = HashMap()
     for form in constantForms:
         functionInfoMap.put(form, FunctionInfoImpl.create(form, self.constantChecker))
     # First we set the "true" values, then we get the forms controlled by the flow...
     # Use "init" values
     trueFlowSentences = HashSet()
     for form in constantForms:
         if form.__name__ == self.INIT:
             for initSentence in constantChecker.getTrueSentences(form):
                 trueFlowSentences.add(trueSentence)
     # Go through ordering, adding to trueFlowSentences
     addSentenceForms(ordering, trueFlowSentences, model, functionInfoMap)
     self.sentencesTrueByTurn.add(trueFlowSentences)
     while True:
         # Now we use the "next" values from the previous turn
         trueFlowSentences = HashSet()
         for sentence in sentencesPreviouslyTrue:
             if sentence.__name__ == self.NEXT:
                 trueFlowSentences.add(trueSentence)
         addSentenceForms(ordering, trueFlowSentences, model, functionInfoMap)
         # Test if this turn's flow is the same as an earlier one
         i = 0
         while i < len(self.sentencesTrueByTurn):
             prevSet = self.sentencesTrueByTurn[i]
             if prevSet == trueFlowSentences:
                 # Complete the loop
                 self.turnAfterLast = i
                 break
             i += 1
         self.sentencesTrueByTurn.add(trueFlowSentences)
Code example #17
File: cloud.py  Project: piyush76/EMS
def findMessages(mc,custid,count):
    print 'finding ' + str(count) + ' messages...'
    amsm = mc.getActiveMailboxStoreManager()
    msgs = HashMap()
    retries = 10 # 10 minutes
    while msgs.size() < count and retries > 0:
        sleep(60)
        retries = retries - 1
        
        for p in mc.getPartitionManager().listPartitions():
            if p.isReadOnly():
                continue
            print 'searching for messages to be stored in',p
            for msg in amsm.findMessages([SearchConstraint(IActiveMailboxStoreManager.PROP_CUST_ID, SearchConstraintOperator.CONSTRAINT_EQUALS,int(custid))],p,True):
                msgs.put(msg.getMessageId(), msg)	
        
        print 'found',msgs.size(),'messages',msgs.keySet()

    if msgs.isEmpty():
        print 'Failed to find any messages in DB'
        raise Exception('Failed to find any messages in DB')

    if msgs.size() < count:
        print 'Warning, did not find all messages expected'

    return msgs.values()
Code example #18
File: sapappsutils.py  Project: ddonnelly19/dd-git
	def getApplicationHierarchy(self):
		appsProps = []
		params = HashMap()
		params.put('OBJECT_TYPE', 'FUGR')
		params.put('REFRESH', 'X')
#		function = self.__client.getFunction('RS_COMPONENT_VIEW')
#		function.getImportParameterList().setValue("FUGR", "OBJECT_TYPE");
#		function.getImportParameterList().setValue("X", "REFRESH");
		
		fields = ArrayList()
		fields.add('ID')
		fields.add('TYPE')
		fields.add('NAME')
		fields.add('PARENT')
		fields.add('TEXT')
		
		appsRS = self.__client.executeFunction('RS_COMPONENT_VIEW', params, 'NODETAB', fields)
		
		while appsRS.next():
			prop = Properties()
			prop.setProperty('id', appsRS.getString("ID"))
			prop.setProperty('type', appsRS.getString("TYPE"))
			prop.setProperty('data_name', appsRS.getString("NAME"))
			prop.setProperty('parent', appsRS.getString("PARENT"))
			prop.setProperty('description', appsRS.getString("TEXT"))
			appsProps.append(prop)

		return appsProps;
Code example #19
File: createvendor.py  Project: Adoyser/NGECore2
def run(core, actor, target, commandString):
	
	if actor.getCombatFlag() > 0:
		return
	
	cell = actor.getContainer()
	building = actor.getGrandparent()
	ghost = actor.getSlottedObject('ghost')
		
	if not ghost or not cell or not building or not core.housingService.getPermissions(actor, cell):
		return
		
	if ghost.getAmountOfVendors() >= actor.getSkillModBase('manage_vendor'):
		actor.sendSystemMessage('@player_structure:full_vendors', 0)
		return
	
	suiOptions = HashMap()
	suiOptions.put(Long(1), '@player_structure:terminal')
	suiOptions.put(Long(2), '@player_structure:droid')
	# TODO add creatures
	window = core.suiService.createListBox(ListBoxType.LIST_BOX_OK_CANCEL, '@player_structure:vendor_type_t', '@player_structure:vendor_type_d', suiOptions, actor, None, 5)
	returnList = Vector()
	returnList.add('List.lstList:SelectedRow')
	window.addHandler(0, '', Trigger.TRIGGER_OK, returnList, handleFirstWindow)
	core.suiService.openSUIWindow(window)
	return
Code example #20
File: GdlPool.py  Project: hobson/ggpy
 def drainPool(cls):
     """ generated source for method drainPool """
     cls.distinctPool.clear()
     cls.functionPool.clear()
     cls.notPool.clear()
     cls.orPool.clear()
     cls.propositionPool.clear()
     cls.relationPool.clear()
     cls.rulePool.clear()
     cls.variablePool.clear()
     with lock_for_object(cls.variableCases):
         cls.variableCases.clear()
     #  When draining the pool between matches, we still need to preserve the keywords
     #  since there are global references to them. For example, the Prover state machine
     #  has a reference to the GdlConstant "true", and that reference must still point
     #  to the authoritative GdlConstant "true" after the pool is drained and another
     #  game has begun. As such, when draining the constant pool, these special keywords
     #  are set aside and returned to the pool after all of the other constants (which
     #  were game-specific) have been drained.
     keywordConstants = HashMap()
     for keyword in KEYWORDS:
         keywordConstants.put(keyword, GdlPool.getConstant(keyword))
     with lock_for_object(cls.constantCases):
         cls.constantPool.clear()
         cls.constantCases.clear()
         for keywordEntry in keywordConstants.entrySet():
             cls.constantCases.put(keywordEntry.getKey(), keywordEntry.getKey())
             cls.constantPool.put(keywordEntry.getKey(), keywordEntry.getValue())
Code example #21
File: facetTree.py  Project: kiranba/the-fascinator
class FacetList:
    def __init__(self, name, json):
        self.__facetMap = HashMap()
        self.__facetList = ArrayList()
        entries = json.getList("facet_counts/facet_fields/" + name)
        for i in range(0, len(entries), 2):
            value = entries[i]
            count = entries[i+1]
            if count > 0:
                facet = Facet(name, value, count)
                self.__facetMap.put(value, facet)
                slash = value.rfind("/")
                if slash == -1:
                    self.__facetList.add(facet)
                else:
                    parent = self.__getFacet(value[:slash])
                    if parent is not None:
                        parent.addSubFacet(facet)
    
    def __getFacet(self, name):
        return self.__facetMap.get(name)
    
    def getJsonList(self):
        jsonList = ArrayList()
        for facets in self.__facetList:
            jsonList.add(facets.getJson())
        return jsonList
Code example #22
File: facetTree.py  Project: kiranba/the-fascinator
class FacetList:
    def __init__(self, name, results):
        self.__facetMap = HashMap()
        self.__facetList = ArrayList()
        facets = results.getFacets()
        if facets is None:
            return
        facet = facets.get(name)
        if facet is None:
            return
        facetData = facet.values()
        for value in facetData.keySet():
            count = facetData.get(value)
            facet = Facet(name, value, count)
            self.__facetMap.put(value, facet)
            slash = value.rfind("/")
            if slash == -1:
                self.__facetList.add(facet)
            else:
                parent = self.__getFacet(value[:slash])
                if parent is not None:
                    parent.addSubFacet(facet)

    def __getFacet(self, name):
        return self.__facetMap.get(name)

    def getJsonList(self):
        jsonList = ArrayList()
        for facets in self.__facetList:
            jsonList.add(facets.getJson())
        return jsonList
Code example #23
File: ProverCache.py  Project: hobson/ggpy
class ProverCache(object):
    """ generated source for class ProverCache """
    contents = HashMap()  # was Map(); java.util.Map is an interface and cannot be instantiated directly

    def __init__(self):
        """ generated source for method __init__ """
        self.contents = HashMap()

    # 
    # 	 * NOTE: The given sentence must have been renamed with a VariableRenamer.
    # 	 
    def contains(self, renamedSentence):
        """ generated source for method contains """
        return self.contents.containsKey(renamedSentence)

    def get(self, sentence, varRenamedSentence):
        """ generated source for method get """
        cacheContents = self.contents.get(varRenamedSentence)
        if cacheContents == None:
            return None
        results = HashSet()
        for answer in cacheContents:
            results.add(Unifier.unify(sentence, answer))
        return ArrayList(results)

    def put(self, sentence, renamedSentence, answers):
        """ generated source for method put """
        results = HashSet()
        for answer in answers:
            results.add(Substituter.substitute(sentence, answer))
        self.contents.put(renamedSentence, results)
Code example #24
File: userinfo.py  Project: qcif/rdsi-arms
    def __constructInfoJson(self, username):
        """
            There are users managed by the internal auth manager with no attributes.
            There are users managed by external auth managers, e.g. shibboleth, who have attributes.
            We put all available attributes of a user into the return value.
        """
        # print "Query username = %s" % username
        username = username.strip()

        authUserDao = ApplicationContextProvider.getApplicationContext().getBean("hibernateAuthUserDao")
        parameters = HashMap()
        parameters.put("username", username)
        userObjectList = authUserDao.query("getUser", parameters)

        # print "Returned object = %s" % str(userObjectList)
        # print "Returned size = %d" % userObjectList.size() 
        userJson = JsonObject()
        try:
            if userObjectList.size() > 0:
                # One hit will be enough to get user object
                userObj = userObjectList.get(0)
                attrbs = userObj.getAttributes()
                for attrb in attrbs.keySet():
#                     print "Attribute %s = %s" % (attrb, attrbs.get(attrb).getValStr())
                    userJson.put(attrb, attrbs.get(attrb).getValStr())
            else:
               # This should not be reached with external sourced users
                self.log.warn("Wrong username? Every user should have a record")
                userJson.put("userName", username) 
        except Exception, e:
            self.log.error("%s: cannot construct user attribute JSON, detail = %s" % (self.__class__.__name__ , str(e)))
            userJson.put("userName", username)
Code example #25
File: grantAccess.py  Project: qcif/qcloud-arms
    def __constructUserJson(self, username):
        """
            There are users managed by the internal auth manager with no attributes.
            There are users managed by external auth managers, e.g. shibboleth, who have attributes.
            These users' usernames are not necessarily the same as their normal display names.
            This function currently solves this issue by checking the commonName attribute for shibboleth users.
        """
        username = username.strip()
        userJson = JsonObject()
        userJson.put("userName", username) 
        parameters = HashMap()
#         print "Checking user info for %s" % username
        parameters.put("username", username)
        userObjectList = self.authUserDao.query("getUser", parameters)
#         print "Returned size = %d" % userObjectList.size() 
        if userObjectList.size() > 0:
            userObject = userObjectList.get(0)
            #Check if this is a user with attributes?
            attrb = userObject.getAttributes().get("commonName")
            if attrb is None:
#                 print "We cannot find so called commonName, use %s instead" % username
                userJson.put("realName", username)
            else:
#                 print "We found so called commonName, use %s instead of %s" % (attrb.getValStr(), username)
                userJson.put("realName", attrb.getValStr().strip())
        else:
            # This should not be reached
            self.log.warn("What is going on here? why ends up here?")
            userJson.put("realName", username)
            
        return userJson
Code example #26
def makeComponentGroups(client, appServerOSH, ip, OSHVResult, enterprise, siteOSH):
	mapGroupNameToOSH = HashMap()

	compgrpListing = client.executeCmd('list compgrps')#@@CMD_PERMISION siebel protocol execution
	cgTbl = siebel_common.makeTable(compgrpListing)
	# sample output
	# CG_NAME                                  CG_ALIAS    CG_DESC_TEXT                                        CG_DISP_ENABLE_ST  CG_NUM_COMP  SV_NAME  CA_RUN_STATE
	# ---------------------------------------  ----------  --------------------------------------------------  -----------------  -----------  -------  ------------
	# Assignment Management                    AsgnMgmt    Assignment Management Components                    Enabled            2            sblapp2  Online
	# Communications Management                CommMgmt    Communications Management Components                Enabled            7            sblapp2  Online
	# Content Center                           ContCtr     Content Center Components                           Enabled            2            sblapp2  Online
	# Enterprise Application Integration       EAI         Enterprise Application Integration Components       Enabled            10           sblapp2  Online
	# Field Service                            FieldSvc    Field Service Components                            Enabled            13           sblapp2  Online
	# <... more>
	# n rows returned.

	cgcount = 0
	for cgEntry in cgTbl:
		cgObj = cgEntry[0]
		logger.debug(' cgEntry[0]:',  cgEntry[0])
		cgDataRow = cgEntry[1]
		cgOSH = makeCompGrp(cgObj, cgDataRow, appServerOSH)
		cgcount += 1
		# in older versions, the component contains cg name
		# in later versions, the component contains cg alias
		cgName = cgObj[0]
		cgAlias = cgObj[1]
		cgOSH.setContainer(appServerOSH)
		OSHVResult.add(cgOSH)
		mapGroupNameToOSH.put(cgName,cgOSH)
		mapGroupNameToOSH.put(cgAlias,cgOSH)

	getGroupComponents(client, mapGroupNameToOSH, ip, OSHVResult, enterprise, siteOSH)
	logger.debug('parsed ', str(cgcount), ' component groups')
Code example #27
    def __getUserInfo(self, username):
        """
            Query HibernateUser to get a user's information.
            There are users managed by the internal auth manager with no attributes other than a password.
            There are users managed by external auth managers, e.g. shibboleth, who have attributes.
            At the time of writing (20140904), each user has multiple identical attribute sets,
            so only the first one is used.
            We put all available attributes of a user into the return value.
        """
        username = username.strip()

        authUserDao = ApplicationContextProvider.getApplicationContext().getBean("hibernateAuthUserDao")
        parameters = HashMap()
        parameters.put("username", username)
        userObjectList = authUserDao.query("getUser", parameters)

        userJson = JsonObject()
        userJson.put("username", username) 
        try:
            if userObjectList.size() > 0:
                # One hit will be enough to get user object
                userJson = self.__constructUserAttribs(userObjectList.get(0), self.ATTRIB_FILTER)
            else:
               # This should not be reached with external sourced users
                self.log.warn("Wrong username or internal user is queried")
        except Exception, e:
            self.log.error("%s: cannot construct user attribute JSON, detail = %s" % (self.__class__.__name__ , str(e)))
Code example #28
    def get_doc_phrase_freq(self, phrase, field, slop, ordered):
        """
        Returns the frequency of the given phrase in each document, for the given field.

        :param phrase: str
        :param field: field name
        :param slop: number of terms in between
        :param ordered: If true, term occurrences should be ordered
        :return: dictionary {doc: freq, ...}
        """
        # creates span near query
        span_near_query = self.get_span_query(phrase.split(" "), field, slop=slop, ordered=ordered)

        # extracts document frequency
        self.open_searcher()
        index_reader_context = self.searcher.getTopReaderContext()
        term_contexts = HashMap()
        terms = TreeSet()
        span_near_query.extractTerms(terms)
        for term in terms:
            term_contexts.put(term, TermContext.build(index_reader_context, term))
        leaves = index_reader_context.leaves()
        doc_phrase_freq = {}
        # iterates over all atomic readers
        for atomic_reader_context in leaves:
            bits = atomic_reader_context.reader().getLiveDocs()
            spans = span_near_query.getSpans(atomic_reader_context, bits, term_contexts)
            while spans.next():
                lucene_doc_id = spans.doc()
                doc_id = atomic_reader_context.reader().document(lucene_doc_id).get(self.FIELDNAME_ID)
                if doc_id not in doc_phrase_freq:
                    doc_phrase_freq[doc_id] = 1
                else:
                    doc_phrase_freq[doc_id] += 1
        return doc_phrase_freq
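
Note: a hedged usage sketch; the instance name `lucene` and the phrase/field values are assumptions, used only to illustrate the shape of the returned {doc: freq} dictionary.

# Hypothetical call; assumes an instance of the surrounding class is available as `lucene`.
freqs = lucene.get_doc_phrase_freq("machine learning", "content", slop=0, ordered=True)
for doc_id, freq in freqs.items():
    print doc_id, freq   # e.g. "doc-42 3"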
Code example #29
File: organiser.py  Project: kiranba/the-fascinator
 def __getRvtNodes(self, manifest):
     rvtNodes = ArrayList()
     #print "manifest=%s" % manifest
     for key in manifest.keySet():
         package = False
         node = manifest.get(key)
         try:
             # add the node
             rvtNode = HashMap()
             if node.get("hidden") != "True":
                 relPath = node.get("id")
                 # check if node is a package
                 if relPath:
                     package = (self.__getContentType(relPath) == "application/x-fascinator-package")
                 else:
                     relPath = key.replace("node", "blank")
                 rvtNode.put("visible", True)
                 rvtNode.put("title", node.get("title"))
                 if package:
                     subManifest = self.__readManifest(relPath)
                     if subManifest:
                         subManifest = subManifest.getJsonMap("manifest")
                         rvtNode.put("children", self.__getRvtNodes(subManifest))
                     relPath = key.replace("node", "package")
                 else:
                     rvtNode.put("children", self.__getRvtNodes(node.getJsonMap("children")))
                 rvtNode.put("relPath", relPath)
                 rvtNodes.add(rvtNode)
         except Exception, e:
             log.error("Failed to process node '%s': '%s'" % (node.toString(), str(e)))
Code example #30
File: CQ5 Loader.py  Project: amancevice/legacy
	def enhancePages( self, project, connector_url, cq_user, cq_password, page_query, page_queryLib ):	
				
		# Validate Parameters
		if not self.validInputs(connector_url,cq_user,cq_password,"asset_drive","asset_filenameAttr","asset_pathAttr",page_query,page_queryLib,"ia_path","ia_cacheCols"): return
		
		# HTTP Connection Details
		client = HttpClient()
		base64EncodedCredentials = Base64.encodeString("%s:%s" % (cq_user,cq_password))
		header = Header("Authorization", "Basic %s" % (base64EncodedCredentials))
		
		# Cycle through contentdescriptors specified by scope query
		query = queryLibrarianService.findQueryByName(page_queryLib, page_query)
		params = HashMap()
		params.put('projectId',project.id)
		res = queryLibrarianService.executeQuery(query, params)
		
		# Get cd
		for r in res:
			cd = r[0]
			post = PostMethod(connector_url)
			post.addRequestHeader(header)
			get = GetMethod(connector_url)
			get.addRequestHeader(header)
			try:
				payload = cd.getContent()[0].getContentData().encode("utf-8")
				[loadStatus, failureReason] = self.sendPayload(client,get,post,payload,cd.url)
				cd.metadata["Load.Status"] = loadStatus
				cd.metadata["Load.Failure Reason"] = failureReason
				if loadStatus != "SUCCESS":
					logger.error("url: %s, reason: %s" % (cd.url, failureReason))
			except:
				cd.metadata["Load.Status"] = "FAILURE"
				cd.metadata["Load.Failure Reason"] = "Null content"
				logger.error("url: %s, reason: %s" % (cd.url, failureReason))
Code example #31
def handleFirstWindow(actor, window, eventType, returnList):

    core = NGECore.getInstance()
    index = int(returnList.get(0))
    selected = window.getObjectIdByIndex(index)
    hiring = actor.getSkillModBase('hiring')
    if selected == 1:
        suiOptions = HashMap()
        if hiring >= 10:
            suiOptions.put(Long(1), '@player_structure:terminal_bulky')
        if hiring >= 30:
            suiOptions.put(Long(2), '@player_structure:terminal_standard')
        if hiring >= 50:
            suiOptions.put(Long(3), '@player_structure:terminal_small')
        if hiring >= 75:
            suiOptions.put(Long(4), '@player_structure:terminal_fancy')
        if hiring >= 90:
            suiOptions.put(Long(5), '@player_structure:terminal_slim')
        window = core.suiService.createListBox(
            ListBoxType.LIST_BOX_OK_CANCEL,
            '@player_structure:terminal_type_t',
            '@player_structure:terminal_type_d', suiOptions, actor, None, 5)
        returnList = Vector()
        returnList.add('List.lstList:SelectedRow')
        window.addHandler(0, '', Trigger.TRIGGER_OK, returnList,
                          handleTerminalWindow)
        core.suiService.openSUIWindow(window)

    if selected == 2:
        suiOptions = HashMap()
        if hiring >= 20:
            suiOptions.put(Long(1), '@player_structure:droid_bartender')
        if hiring >= 50:
            suiOptions.put(Long(2), '@player_structure:droid_power')
        if hiring >= 60:
            suiOptions.put(Long(3), '@player_structure:droid_wed')
        if hiring >= 90:
            suiOptions.put(Long(4), '@player_structure:droid_surgical')
        if hiring >= 100:
            suiOptions.put(Long(5), '@player_structure:droid_protocol')
        window = core.suiService.createListBox(
            ListBoxType.LIST_BOX_OK_CANCEL, '@player_structure:droid_type_t',
            '@player_structure:droid_type_d', suiOptions, actor, None, 5)
        returnList = Vector()
        returnList.add('List.lstList:SelectedRow')
        window.addHandler(0, '', Trigger.TRIGGER_OK, returnList,
                          handleDroidWindow)
        core.suiService.openSUIWindow(window)

    return
Code example #32
File: timer_tab.py  Project: affilares/graph-ql
class TimerTab(ITab, IHttpListener):
    def __init__(self, callbacks, helpers):
        self._callbacks = callbacks
        self._helpers = helpers
        self.isRunning = True
        self.toolFilter = 0
        self.reqResMap = HashMap()
        callbacks.registerHttpListener(self)
        self.panel = TimerPanel(
            logtable_factory=lambda model: LogTable(model, self._callbacks),
            external_clear_button_action_listener=lambda e: self.getReqResMap(
            ).clear(),
            external_start_button_action_listener=lambda e: self.setRunning(
                True),
            external_stop_button_action_listener=lambda e: self.setRunning(
                False),
            external_filter_action_listener=self.filter_action_listener,
            tools_keys=["All", "Proxy", "Intruder", "Scanner", "Repeater"])

    def getTabCaption(self):
        """
        Override ITab method
        :return: tab name
        """
        return "InQL Timer"

    def getUiComponent(self):
        """
        Override ITab method
        :return: Tab UI Component
        """
        self._callbacks.customizeUiComponent(self.panel.this)
        return self.panel.this

    def filter_action_listener(self, e):
        tool = e.getSource().getSelectedItem()
        if tool == "All":
            self.setToolFilter(0)
        elif tool == "Proxy":
            self.setToolFilter(IBurpExtenderCallbacks.TOOL_PROXY)
        elif tool == "Intruder":
            self.setToolFilter(IBurpExtenderCallbacks.TOOL_INTRUDER)
        elif tool == "Scanner":
            self.setToolFilter(IBurpExtenderCallbacks.TOOL_SCANNER)
        elif tool == "Repeater":
            self.setToolFilter(IBurpExtenderCallbacks.TOOL_REPEATER)
        else:
            raise RuntimeError("Unknown tool: %s" % tool)

    def setRunning(self, running):
        self.isRunning = running

    def setToolFilter(self, toolFilter):
        self.toolFilter = toolFilter

    def processHttpMessage(self, toolFlag, messageIsRequest, requestResponse):

        if self.isRunning:
            if self.toolFilter == 0 or self.toolFilter == toolFlag:
                messageInfo = self._helpers.analyzeRequest(requestResponse)
                url = messageInfo.getUrl()
                requestBody = requestResponse.getRequest(
                )[messageInfo.getBodyOffset():].tostring()
                if not is_query(requestBody):
                    return  # exit early
                qobj = json.loads(requestBody)
                queryBody = ""
                operationName = ""
                if 'query' in qobj:
                    queryBody = qobj['query']
                if 'operationName' in qobj:
                    operationName = qobj['operationName']
                if messageIsRequest:
                    self.reqResMap.put(url, System.currentTimeMillis())
                elif self.reqResMap.containsKey(url):
                    time = System.currentTimeMillis() - self.reqResMap.get(url)
                    self.reqResMap.remove(url)
                    # create a new log entry with the message details
                    synchronize.apply_synchronized(
                        self.panel.getLogTableModel().getLogArray(),
                        self.syncProcessHttpMessage,
                        (toolFlag, requestResponse, time, queryBody,
                         operationName))

    def syncProcessHttpMessage(self, toolFlag, messageInfo, time, queryBody,
                               operationName):
        row = self.panel.getLogTableModel().getLogArray().size()
        # Log all requests - the default
        if not self.panel.getQueryFilterText(
        ) and not self.panel.isScopeSelected():
            self.addLog(messageInfo, toolFlag, time, row, operationName)
        # Log filter URL requests
        elif not self.panel.isScopeSelected() and self.panel.getQueryFilterText() and \
            self.panel.getQueryFilterText() in queryBody:
            self.addLog(messageInfo, toolFlag, time, row, operationName)
        # Log in-scope requests
        elif self.panel.isScopeSelected() and not self.panel.getQueryFilterText() and \
              self._callbacks.isInScope(self._helpers.analyzeRequest(messageInfo).getUrl()):
            self.addLog(messageInfo, toolFlag, time, row, operationName)
        # Log in-scope requests and filter
        elif self.panel.isScopeSelected() and self.panel.getQueryFilterText() and \
                self._callbacks.isInScope(self._helpers.analyzeRequest(messageInfo).getUrl()) and \
                self.panel.getQueryFilterText() in queryBody:
            self.addLog(messageInfo, toolFlag, time, row, operationName)

    def addLog(self, messageInfo, toolFlag, time, row, operationName):

        self.panel.getLogTableModel().getLogArray().add(
            Log(
                LocalDateTime.now(), self._callbacks.getToolName(toolFlag),
                self._callbacks.saveBuffersToTempFiles(messageInfo),
                self._helpers.analyzeRequest(messageInfo).getUrl(),
                self._helpers.analyzeResponse(
                    messageInfo.getResponse()).getStatusCode(), operationName,
                time))
        self.panel.getLogTableModel().fireTableRowsInserted(row, row)

    def getReqResMap(self):
        return self.reqResMap
Code example #33
    def genernate_statistics_content(self, webpart):
        cache_key = "unit" + str(self.unit.unitId) + "_" + str(
            webpart.getUnitWebpartId())
        content = cache.get(cache_key)
        if content != None:
            request.setAttribute(cache_key, content)
            return
        map = HashMap()
        map.put("unit", self.unit)
        map.put("UnitRootUrl", self.unitRootUrl)
        map.put("webpart", webpart)
        # Query data for subordinate organizations
        map.put("unitAllUserCount",
                self.unitService.getAllUserCount(self.unit))
        map.put("unitAllArticleCount",
                self.unitService.getAllArticleCount(self.unit))
        map.put("unitAllResourceCount",
                self.unitService.getAllResourceCount(self.unit))
        map.put("unitAllPhotoCount",
                self.unitService.getAllPhotoCount(self.unit))
        map.put("unitAllVideoCount",
                self.unitService.getAllVideoCount(self.unit))

        content = self.templateProcessor.processTemplate(
            map, "/WEB-INF/unitspage/" + self.templateName + "/unit_count.ftl",
            "utf-8")
        request.setAttribute(cache_key, content)
        cache.put(cache_key, content)
Code example #34
    def execute(self):
        #print "MemcachedExpireTimeConfig.getSiteIndexExpireTime()=",MemcachedExpireTimeConfig.getSiteIndexExpireTime()
        # The three checks below report (in Chinese) that a required service could not be loaded
        # and ask to verify that applicationContext.xml is not missing the corresponding bean node.
        if self.viewcount_svc == None:
            self.addActionError(
                u"无法加载组织机构服务,请检查 applicationContext.xml 配置文件,是否缺少 viewCountService 节点。"
            )
            return self.ERROR

        if self.templateProcessor == None:
            self.addActionError(
                u"无法加载组织机构服务,请检查 applicationContext.xml 配置文件,是否缺少 templateProcessor 节点。"
            )
            return self.ERROR

        if self.unitService == None:
            self.addActionError(
                u"无法加载组织机构服务,请检查 applicationContext.xml 配置文件,是否缺少 unitService 节点。"
            )
            return self.ERROR

        self.unit = self.getUnit()
        if self.unit == None:
            # Error message: "The organization you are visiting does not exist!"
            self.addActionError(u"您所访问的机构不存在!")
            return self.ERROR
        #print "self.unit.parentId = ", self.unit.parentId

        if self.unit.parentId == 0:
            response.sendRedirect(request.getContextPath() + "/")
            return
        preview = self.params.safeGetStringParam("preview")
        if self.isUnitAdmin() == False or preview == "":
            fc = FileCache()
            html = ""
            out = response.getWriter()
            theme = self.params.safeGetStringParam("theme")

            if theme != "":
                # This is a preview
                html = self.htmlGeneratorService.UnitIndex(
                    self.unit, "", theme)
                out.write(html)
            else:
                unitIndexHtmlPath = fc.getUnitHtmlFolder(
                    self.unit.unitName) + "index.html"
                if fc.contentIsExpired(
                        unitIndexHtmlPath,
                        MemcachedExpireTimeConfig.getSiteIndexExpireTime() /
                        60) == True:
                    html = self.htmlGeneratorService.UnitIndex(self.unit)
                    out.write(html)
                else:
                    file = File(unitIndexHtmlPath)
                    out.write(
                        CommonUtil.readFile(file.getCanonicalPath(), "UTF-8"))
                    file = None
            fc = None
            #request.getSession().getServletContext().getRequestDispatcher("/html/unit/" + self.unit.unitName + "/index.html").forward(request, response)
            return
        """
        NOTE: the code below is executed only by administrators!
        """
        self.templateName = "template1"
        if self.unit.templateName != None:
            self.templateName = self.unit.templateName

        webpartList = self.unitService.getUnitWebpartList(self.unit.unitId)
        if self.params.existParam("tm") == False:
            if len(webpartList) < 1:
                self.genWebparts()
                response.sendRedirect("?tm=1")
                return

        for webpart in webpartList:
            self.set_webpart_flag(webpart)
            #print "webpart.moduleName = ", webpart.moduleName
            # Generate the actual content and store it in the content field
            if webpart.moduleName == UnitWebpart.WEBPART_MODULENAME_ARTICLE:
                self.genernate_article_content(webpart)
            elif webpart.moduleName == UnitWebpart.WEBPART_MODULENAME_RESOURCE:
                self.genernate_resoure_content(webpart)
            elif webpart.moduleName == UnitWebpart.WEBPART_MODULENAME_PHOTO:
                self.genernate_photo_content(webpart)
            elif webpart.moduleName == UnitWebpart.WEBPART_MODULENAME_VIDEO:
                self.genernate_video_content(webpart)
            elif webpart.moduleName == UnitWebpart.WEBPART_MODULENAME_PICNEWS:
                self.genernate_picnews_content(webpart)
            elif webpart.moduleName == UnitWebpart.WEBPART_MODULENAME_NEWESTNEWS:
                self.genernate_newestnews_content(webpart)
            elif webpart.moduleName == UnitWebpart.WEBPART_MODULENAME_UNITNOTICE:
                self.genernate_unitnotice_content(webpart)
            elif webpart.moduleName == UnitWebpart.WEBPART_MODULENAME_LINKS:
                self.genernate_links_content(webpart)
            elif webpart.moduleName == UnitWebpart.WEBPART_MODULENAME_STATISTICS:
                self.genernate_statistics_content(webpart)
            elif webpart.moduleName == UnitWebpart.WEBPART_MODULENAME_VOTE:
                self.genernate_vote_content(webpart)
            elif webpart.moduleName == UnitWebpart.WEBPART_MODULENAME_UNITSUBJECT:
                self.genernate_unitsubject_content(webpart)
            elif webpart.moduleName == UnitWebpart.WEBPART_MODULENAME_UNITGROUP:
                self.genernate_group_content(webpart)
            elif webpart.moduleName == UnitWebpart.WEBPART_MODULENAME_UNITPREPARECOURSE:
                self.genernate_preparecourse_content(webpart)
            else:
                # Hand-written content, no processing needed
                cache_key = "unit" + str(self.unit.unitId) + "_" + str(
                    webpart.getUnitWebpartId())
                content = cache.get(cache_key)
                if content != None:
                    request.setAttribute(cache_key, content)
                else:
                    map = HashMap()
                    map.put("unit", self.unit)
                    map.put("UnitRootUrl", self.unitRootUrl)
                    map.put("webpart", webpart)
                    content = self.templateProcessor.processTemplate(
                        map, "/WEB-INF/unitspage/" + self.templateName +
                        "/custorm.ftl", "utf-8")
                    request.setAttribute(cache_key, content)
                    cache.put(cache_key, content)

        theme = self.params.safeGetStringParam("theme")
        request.setAttribute("head_nav", "unit")
        request.setAttribute("unit", self.unit)
        request.setAttribute("webpartList", webpartList)
        if theme != "":
            request.setAttribute("theme", theme)

        if self.loginUser != None:
            request.setAttribute("loginUser", self.loginUser)

            if self.isUnitAdmin() == True and preview != "":
                request.setAttribute("role", "admin")

        request.setAttribute("req", request)
        return "/WEB-INF/unitspage/" + self.templateName + "/index.ftl"
Code example #35
    def genernate_resoure_content(self, webpart):
        cache_key = "unit" + str(self.unit.unitId) + "_" + str(
            webpart.getUnitWebpartId())
        content = cache.get(cache_key)
        if content != None:
            request.setAttribute(cache_key, content)
            return

        map = HashMap()
        # Latest resources
        qry = ResourceQuery(
            """ r.resourceId, r.title, r.href, r.createDate, r.fsize, r.downloadCount, 
            r.userId, r.subjectId as subjectId, grad.gradeName, sc.name as scName """
        )
        #qry.unitId = self.unit.unitId
        qry.custormAndWhereClause = " r.approvedPathInfo Like '%/" + str(
            self.unit.unitId) + "/%' "
        new_resource_list = qry.query_map(10)
        map.put("new_resource_list", new_resource_list)

        # This week's popular resources
        qry = ResourceQuery(
            """ r.resourceId, r.title, r.href, r.createDate, r.fsize, r.downloadCount, 
            r.userId, msubj.msubjName, grad.gradeName, sc.name as scName """)
        qry.orderType = 4  # downloadCount DESC
        #qry.unitId = self.unit.unitId
        qry.custormAndWhereClause = " r.approvedPathInfo Like '%/" + str(
            self.unit.unitId) + "/%' "
        hot_resource_list = qry.query_map(10)
        map.put("hot_resource_list", hot_resource_list)

        #hot_resource_list = self.viewcount_svc.getViewCountListShared(12,7,10,self.unit.unitPath,self.unit.unitDepth);
        #map.put("hot_resource_list", hot_resource_list)

        # Recommended resources
        qry = ResourceQuery(
            """ r.resourceId, r.title, r.href, r.createDate, r.fsize, r.downloadCount, 
            r.userId,msubj.msubjName, grad.gradeName, sc.name as scName """)
        #qry.rcmdState = True
        #qry.unitId = self.unit.unitId
        qry.custormAndWhereClause = " r.approvedPathInfo Like '%/" + str(
            self.unit.unitId) + "/%' And r.rcmdPathInfo Like '%/" + str(
                self.unit.unitId) + "/%' "
        rcmd_resource_list = qry.query_map(10)
        map.put("rcmd_resource_list", rcmd_resource_list)

        map.put("unit", self.unit)
        map.put("webpart", webpart)
        map.put("UnitRootUrl", self.unitRootUrl)
        content = self.templateProcessor.processTemplate(
            map, "/WEB-INF/unitspage/" + self.templateName + "/resource.ftl",
            "utf-8")
        request.setAttribute(cache_key, content)
        cache.put(cache_key, content)
Code example #36
    def genernate_article_content(self, webpart):
        cache_key = "unit" + str(self.unit.unitId) + "_" + str(
            webpart.getUnitWebpartId())
        content = cache.get(cache_key)
        #content = None
        if content != None:
            request.setAttribute(cache_key, content)
            return

        map = HashMap()
        qry = ArticleQuery(
            """ a.articleId, a.title, a.createDate,a.typeState, u.userId, u.userIcon, u.nickName, u.loginName """
        )
        qry.custormAndWhereClause = "a.approvedPathInfo Like '%/" + str(
            self.unit.unitId) + "/%' "

        newest_article_list = qry.query_map(10)
        map.put("newest_article_list", newest_article_list)

        #hot_article_list=self.viewcount_svc.getViewCountListShared(3,7,10,self.unit.unitPath,self.unit.unitDepth);
        #map.put("hot_article_list",hot_article_list)

        qry = ArticleQuery(
            """ a.articleId, a.title, a.createDate,a.typeState, u.userId, u.userIcon, u.nickName, u.loginName """
        )
        qry.custormAndWhereClause = " a.approvedPathInfo Like '%/" + str(
            self.unit.unitId) + "/%' "
        qry.orderType = 2
        hot_article_list = qry.query_map(10)
        map.put("hot_article_list", hot_article_list)

        qry = ArticleQuery(
            """ a.articleId, a.title, a.createDate, a.articleAbstract, a.articleContent, a.typeState, u.userId, u.userIcon, u.nickName, u.loginName """
        )
        #qry.rcmdState = True
        qry.custormAndWhereClause = " a.approvedPathInfo Like '%/" + str(
            self.unit.unitId) + "/%' and a.rcmdPathInfo Like '%/" + str(
                self.unit.unitId) + "/%' "
        rcmd_article_list = qry.query_map(10)
        map.put("rcmd_article_list", rcmd_article_list)

        map.put("unit", self.unit)
        map.put("webpart", webpart)
        map.put("UnitRootUrl", self.unitRootUrl)
        content = self.templateProcessor.processTemplate(
            map, "/WEB-INF/unitspage/" + self.templateName + "/article.ftl",
            "utf-8")
        request.setAttribute(cache_key, content)
        cache.put(cache_key, content)
コード例 #37
0
ファイル: listeners.py プロジェクト: yaogao000/robotframework
    def _to_map(self, dictionary):
        map = HashMap()
        for key, value in dictionary.iteritems():
            map.put(key, value)
        return map
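For reference, a round trip between a Python dict and java.util.HashMap under Jython might look like the sketch below; to_dict is a hypothetical helper, not part of the listener above:

from java.util import HashMap

def to_map(dictionary):
    m = HashMap()
    for key, value in dictionary.items():
        m.put(key, value)
    return m

def to_dict(java_map):
    # entrySet() iteration works for any java.util.Map implementation.
    return dict((e.getKey(), e.getValue()) for e in java_map.entrySet())

attrs = to_map({'suite': 'login', 'status': 'PASS'})
assert to_dict(attrs) == {'suite': 'login', 'status': 'PASS'}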
コード例 #38
0
ファイル: smooth3dBg.py プロジェクト: stefanovaleri/jCAE
if options.coplanarity:
    liaison.getMesh().buildRidges(options.coplanarity)
if options.preserveGroups:
    liaison.getMesh().buildGroupBoundaries()

if options.recordFile:
    cmds = [
        String("assert self.m.checkNoDegeneratedTriangles()"),
        String("assert self.m.checkNoInvertedTriangles()"),
        String("assert self.m.checkVertexLinks()"),
        String("assert self.m.isValid()")
    ]
    liaison.getMesh().getTrace().setHooks(cmds)

opts = HashMap()
opts.put("iterations", str(options.iterations))
opts.put("boundaries", str(options.boundaries))
opts.put("check", str(options.check))
opts.put("size", str(options.size))
opts.put("tolerance", str(options.tolerance))
opts.put("relaxation", str(options.relaxation))
opts.put("refresh", str(options.refresh))
if (options.coplanarity >= 0.0):
    opts.put("coplanarity", str(options.coplanarity))
if options.prefix:
    sm = MySmoothNodes3DBg(liaison, opts, options.prefix)
else:
    sm = SmoothNodes3DBg(liaison, opts)
sm.setProgressBarStatus(10000)
sm.compute()
if options.recordFile:
コード例 #39
0
ファイル: search.py プロジェクト: wdqatualr/hoaxy-backend
class Searcher():
    """A simple interface to search articles.

    In this class `MultiFieldQueryParser` and `DuplicateFilter` are used to
    accomplish our application: the query should apply to multiple fields, and
    duplication should be avoided.
    """
    def __init__(self,
                 index_dir,
                 search_fields=['canonical_url', 'title', 'meta', 'content'],
                 unique_field='uq_id_str',
                 boost=dict(canonical_url=4.0,
                            title=8.0,
                            meta=2.0,
                            content=1.0),
                 date_format='%Y-%m-%dT%H:%M:%S'):
        """Constructor of Searcher.

        Parameters
        ----------
        index_dir : string
            The location of lucene index.
        search_fields : list
            A list of field names indicating fields to search on.
        unique_field : string
            The field name on which duplication should be avoided.
        boost : dict
            This dict controls the per-field weights used when computing scores.
        date_format : string
            Used to convert strings into datetime objects. Should be consistent
            with the indexing part.
        """
        self.index_dir = index_dir
        self.search_fields = search_fields
        self.sort_by_recent = Sort(
            SortField('date_published', SortField.Type.STRING, True))
        self.store = FSDirectory.open(File(index_dir))
        self.reader = DirectoryReader.open(self.store)
        self.isearcher = IndexSearcher(self.reader)
        self.analyzer = StandardAnalyzer()
        self.dup_filter = DuplicateFilter(unique_field)
        self.boost_map = HashMap()
        for k, v in boost.iteritems():
            self.boost_map.put(k, Float(v))
        self.mul_parser = MultiFieldQueryParser(search_fields, self.analyzer,
                                                self.boost_map)
        self.date_format = date_format

    def prepare_chained_filter(self, dt1, dt2):
        """Return a chained filter."""
        return ChainedFilter([
            self.dup_filter,
            TermRangeFilter(
                'date_published', BytesRef(dt1.strftime(self.date_format)),
                BytesRef(dt2.strftime(self.date_format)), True, True)
        ], [ChainedFilter.AND, ChainedFilter.AND])

    def refresh(self):
        """Refresh the searsher, if index is changed."""
        nireader = DirectoryReader.openIfChanged(self.reader)
        if nireader:
            self.reader.close()
            self.reader = nireader
            self.isearcher = IndexSearcher(self.reader)
            logger.debug('Index file changed, refreshed')
        else:
            logger.debug('Index file did not change.')

    def fetch_one_doc(self, score_doc):
        """Fetch one document from the scored doc results."""
        doc = self.isearcher.doc(score_doc.doc)
        return (
            doc.getField("group_id").numericValue().intValue(),
            doc.get("canonical_url"),
            doc.get("title"),
            doc.get("date_published"),
            doc.get("domain"),
            doc.get("site_type"),
            score_doc.score,
        )

    def search(self,
               query,
               n1=100,
               n2=100000,
               sort_by='relevant',
               use_lucene_syntax=False,
               min_score_of_recent_sorting=0.4,
               min_date_published=None):
        """Return the matched articles from lucene.

        Parameters
        ----------
        query : string
            The query string.
        n1 : int
            How many results are finally returned.
        n2 : int
            How many search results are retrieved when sorting by recent.
        sort_by : string
            {'relevant', 'recent'}, the sorting order when doing lucene searching.
        min_score_of_recent_sorting : float
            The min score when sorting by 'recent'.
        min_date_published : datetime or string
            The minimum date_published used to filter lucene search results.

        Returns
        -------
        tuple
            (total_hits, df), where total_hits represents the total number
            of hits and df is a pandas.DataFrame object. df.columns = ['id',
            'canonical_url', 'title', 'date_published', 'domain', 'site_type',
            'score']
        """
        if min_date_published is not None:
            dt2 = datetime.utcnow()
            if isinstance(min_date_published, datetime):
                dt1 = min_date_published
            elif isinstance(min_date_published, basestring):
                dt1 = utc_from_str(min_date_published)
            sf = self.prepare_chained_filter(dt1, dt2)
        else:
            sf = self.dup_filter
        try:
            if use_lucene_syntax is False:
                query = clean_query(query)
            q = self.mul_parser.parse(self.mul_parser, query)
            logger.debug('Parsed query: %s', q)
        except Exception as e:
            logger.error(e)
            if use_lucene_syntax is True:
                raise APIParseError("""Error when parse the query string! \
You are quering with lucene syntax, be careful of your query string!""")
            else:
                raise APIParseError('Error when parse the query string!')

        cnames = [
            'id', 'canonical_url', 'title', 'date_published', 'domain',
            'site_type', 'score'
        ]
        if sort_by == 'relevant':
            top_docs = self.isearcher.search(q, sf, n1)
            score_docs = top_docs.scoreDocs
            total_hits = top_docs.totalHits
            if total_hits == 0:
                df = pd.DataFrame()
            else:
                records = [self.fetch_one_doc(sd) for sd in score_docs]
                df = pd.DataFrame(records, columns=cnames)
                df['date_published'] = pd.to_datetime(df['date_published'])
            return total_hits, df
        elif sort_by == 'recent':
            counter = 0
            records = []
            top_field_docs = self.isearcher.search(q, sf, n2,
                                                   self.sort_by_recent, True,
                                                   True)
            if top_field_docs.maxScore >= min_score_of_recent_sorting:
                for sd in top_field_docs.scoreDocs:
                    if sd.score >= min_score_of_recent_sorting:
                        records.append(self.fetch_one_doc(sd))
                        counter += 1
                        if counter == n1:
                            break
            if counter == 0:
                df = pd.DataFrame()
            else:
                df = pd.DataFrame(records, columns=cnames)
                df['date_published'] = pd.to_datetime(df['date_published'])
            return counter, df
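A hypothetical usage sketch for the Searcher class above; the index path and query string are placeholders, and PyLucene's JVM is assumed to be initialised before any Lucene call is made:

import lucene

lucene.initVM()  # start the JVM once per process (PyLucene requirement)
searcher = Searcher(index_dir='/tmp/lucene_index')  # placeholder path
total_hits, df = searcher.search('climate change', n1=50, sort_by='relevant')
print('hits=%d, rows=%d' % (total_hits, len(df)))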
コード例 #40
0
ファイル: thalamus.py プロジェクト: mcchong/nengo_1.4
def make(net,
         name='Network Array',
         neurons=50,
         dimensions=2,
         inhib_scale=3,
         tau_inhib=.005,
         useQuick=True):
    thalamus = net.make_array(name,
                              neurons,
                              dimensions,
                              max_rate=(100, 300),
                              intercept=(-1, 0),
                              radius=1,
                              encoders=[[1]],
                              quick=useQuick)

    # setup inhibitory scaling matrix
    inhib_scaling_matrix = [[0] * dimensions for i in range(dimensions)]
    for i in range(dimensions):
        inhib_scaling_matrix[i][i] = -inhib_scale
    # setup inhibitory matrix
    inhib_matrix = []
    for i in range(dimensions):
        inhib_matrix_part = [[inhib_scaling_matrix[i]] * neurons]
        inhib_matrix.append(inhib_matrix_part[0])

    thalamus.addTermination('bg_input', inhib_matrix, tau_inhib, False)

    def addOne(x):
        return [x[0] + 1]

    net.connect(thalamus,
                None,
                func=addOne,
                origin_name='xBiased',
                create_projection=False)

    if net.network.getMetaData("Thalamus") == None:
        net.network.setMetaData("Thalamus", HashMap())
    thals = net.network.getMetaData("Thalamus")

    thal = HashMap(6)
    thal.put("name", name)
    thal.put("neurons", neurons)
    thal.put("dimensions", dimensions)
    thal.put("inhib_scale", inhib_scale)
    thal.put("tau_inhib", tau_inhib)
    thal.put("useQuick", useQuick)

    thals.put(name, thal)

    if net.network.getMetaData("templates") == None:
        net.network.setMetaData("templates", ArrayList())
    templates = net.network.getMetaData("templates")
    templates.add(name)

    return thalamus
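A short, hedged sketch of reading the stored template metadata back out afterwards, assuming net is the same network wrapper used above and the array was created with the default name 'Network Array':

thals = net.network.getMetaData("Thalamus")
if thals is not None and thals.containsKey("Network Array"):
    cfg = thals.get("Network Array")
    print('neurons=%s dimensions=%s inhib_scale=%s' % (
        cfg.get("neurons"), cfg.get("dimensions"), cfg.get("inhib_scale")))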
コード例 #41
0
class PagingWindow(UserList):
    def __init__(self, **kwargs):
        # The list is contained w/in self.data
        UserList.__init__(self)
        
        self.limitSize = 0      # Unbounded
        self.limitMin = False   # unbounded
        self.limitMax = False   # unbounded
        
        self.minElmIndx = -1
        self.minElmVal  = None
        self.maxElmIndx = -1
        self.maxElmVal  = None

        # Create a hash map for storing an object as well.
        self.hash = HashMap()

        opts = { 'limitSize' : self.optLimitSize,
                 'limitMin'  : self.optLimitMin,
                 'limitMax'  : self.optLimitMax,
                 
                 'preserveSmallest'  : self.optPreserveSmallest,
                 'preserveLargest'   : self.optPreserveLargest,
        }

        # Process each optional argument.
        for k in kwargs.keys():
            optFunc = opts.get(k)
            if optFunc is None:
                raise LookupError("Option [{}] is not supported by the PagingWindow class.".format(k))
            else:
                optFunc(kwargs[k])
        
        random.seed(time.time())
        
    def optLimitSize(self, _size):
        if type(_size).__name__ != "int":
            raise ValueError("limitSize parameter must be type int. Got type [{}].".format(type(_size).__name__))
        self.limitSize = _size
        
    def optLimitMin(self, _min):
        if type(_min).__name__ != "bool":
            raise ValueError("limitMin parameter must be type bool.")
        self.limitMin = _min

    def optLimitMax(self, _max):
        if type(_max).__name__ != "bool":
            raise ValueError("limitMax parameter must be type bool.")
        self.limitMax = _max
        
    def optPreserveSmallest(self, _small):
        if type(_small).__name__ != "bool":
            raise ValueError("preserveSmallest parameter must be type bool.")
        if _small:
            self.limitMin = False
            self.limitMax = True

    def optPreserveLargest(self, _large):
        if type(_large).__name__ != "bool":
            raise ValueError("preserveLargest parameter must be type bool.")
        if _large:
            self.limitMin = True
            self.limitMax = False


    def add(self, _key, _value = None):

        # print "==> value[{}] limitSize[{}]".format(_key, self.limitSize)
        # print "==> data.__len__[%d]" % self.data.__len__()

        dataLen = self.data.__len__()

        if dataLen < self.limitSize:
            ''' Here we add to the list when the list had not reached its
                max size.
            '''
            # print "..> appending to data: [%s]" % _key
            self.data.append(_key)

            if _value is not None:
                # print " ++> added _value[{}]".format(_value)
                self.hash.put(_key, _value)

            # We should remove the sort on every insert.
            # Use sortedcontainers instead.
            self.data.sort()
            # Return True when a value is added.
            return True
        else:
            # print "..> not appending to data: [%s]" % _key

            insertMinOk = True
            insertMaxOk = True

            if self.limitMin:
                ''' If the new value is greater than the current minElement,
                    we may need to remove the current minElement to make room
                    for the new value.
                '''
                if self.data.__len__() > 0:
                    # The minElmIndx is always 0,
                    # unless the array has no data.
                    self.minElmIndx = 0
                else:
                    self.minElmIndx = -1
                if self.minElmIndx >= 0:
                    self.minElmVal = self.data[self.minElmIndx]
                    if _key < self.minElmVal:
                        insertMinOk = False
                        
            if self.limitMax:
                ''' If the new value is smaller than the current maxElement,
                    we may need to remove the current maxElement to make room
                    for the new value.
                '''
                self.maxElmIndx = self.data.__len__() - 1
                if self.maxElmIndx > 0:
                    self.maxElmVal = self.data[self.maxElmIndx]
                    if _key > self.maxElmVal:
                        insertMaxOk = False

            if self.limitMin and self.limitMax:
                ''' Handle the case where it is ok to insert for either
                    case of limitMin and limitMax
                '''
                if insertMinOk and insertMaxOk:
                    # chooseSide() may be a custom function that gets passed in.
                    side = self.chooseSide(_key)
                    if side == 0:
                        raise AssertionError("chooseSide() should not return 0 as a result")
                    if side < 0:
                        if self.minElmVal is not None:
                            self.data.remove(self.minElmVal)
                            if self.hash.containsKey(self.minElmVal):
                                self.hash.remove(self.minElmVal)
                    if side > 0:
                        if self.maxElmVal is not None:
                            self.data.remove(self.maxElmVal)
                            if self.hash.containsKey(self.maxElmVal):
                                self.hash.remove(self.maxElmVal)
            else:
                if self.limitMin:
                    if insertMinOk:
                        if self.minElmVal is not None:
                            self.data.remove(self.minElmVal)
                            if self.hash.containsKey(self.minElmVal):
                                self.hash.remove(self.minElmVal)
                    else:
                        if self.data.__len__() + 1 > self.limitSize:
                            return False

                if self.limitMax:
                    if insertMaxOk:
                        if self.maxElmVal is not None:
                            self.data.remove(self.maxElmVal)
                            if self.hash.containsKey(self.maxElmVal):
                                self.hash.remove(self.maxElmVal)
                    else:
                        if self.data.__len__() + 1 > self.limitSize:
                            return False

            self.data.append(_key)
            if _value is not None:
                # print " ++> added _value[{}]".format(_value)
                self.hash.put(_key, _value)

            # We should remove the sort on every insert.
            # Possibly use sortedcontainers instead.
            self.data.sort()
            # Return True when a value is added
            return True

    def pop(self, indx):
        # Pop the item at indx from the underlying list.
        _key = self.data.pop(indx)

        # By default, return the key.
        retVal = _key

        # But, if the key has a corresponding value in the hash...
        if self.hash.containsKey(_key):
            # return the hash...
            retVal = [ _key, self.hash.get(_key) ]
            # and remove the object from the hash
            self.hash.remove(_key)

        return retVal

    def chooseSide(self, _key):
        r = random.getrandbits(1)

        if (r == 0):
            return -1
            
        if (r == 1):
            return 1

    def size(self):
        return self.data.__len__()
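A small usage sketch of the PagingWindow class above, assuming the chooseSide/__len__ fixes noted in the code; with preserveSmallest the window keeps only the smallest keys, and optional payloads ride along in the backing HashMap:

window = PagingWindow(limitSize=3, preserveSmallest=True)
for key in [5, 1, 9, 7, 3]:
    window.add(key, _value='payload-%d' % key)
print(window.data)    # -> [1, 3, 5]: only the three smallest keys survive
print(window.pop(0))  # -> [1, 'payload-1']: the key plus its stored payload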
コード例 #42
0
ファイル: dataset-rules.py プロジェクト: andrewjanke/redbox
class IndexData:
    def __activate__(self, context):
        # Prepare variables
        self.index = context["fields"]
        self.object = context["object"]
        self.payload = context["payload"]
        self.params = context["params"]
        self.utils = context["pyUtils"]
        self.config = context["jsonConfig"]
        self.log = context["log"]
        self.last_modified = time.strftime("%Y-%m-%dT%H:%M:%SZ", time.gmtime())
        self.log.debug("Indexing Metadata Record '{}' '{}'",
                       self.object.getId(), self.payload.getId())

        # Common data
        self.__newDoc()
        self.packagePid = None
        pidList = self.object.getPayloadIdList()
        for pid in pidList:
            if pid.endswith(".tfpackage"):
                self.packagePid = pid

        # Real metadata
        if self.itemType == "object":
            self.__basicData()
            self.__metadata()
            # Some of the above steps may request some
            #  messages be sent, particularly workflows
            self.__messages()

        # Make sure security comes after workflows
        self.__security()

    def __newDoc(self):
        self.oid = self.object.getId()
        self.pid = self.payload.getId()
        metadataPid = self.params.getProperty("metaPid", "DC")

        self.utils.add(self.index, "storage_id", self.oid)
        if self.pid == metadataPid:
            self.itemType = "object"
        else:
            self.oid += "/" + self.pid
            self.itemType = "datastream"
            self.utils.add(self.index, "identifier", self.pid)

        self.utils.add(self.index, "id", self.oid)
        self.utils.add(self.index, "item_type", self.itemType)
        self.utils.add(self.index, "last_modified", self.last_modified)
        self.utils.add(self.index, "harvest_config",
                       self.params.getProperty("jsonConfigOid"))
        self.utils.add(self.index, "harvest_rules",
                       self.params.getProperty("rulesOid"))

        self.item_security = []
        self.owner = self.params.getProperty("owner", "guest")
        formatter = SimpleDateFormat('yyyyMMddHHmmss')
        self.params.setProperty("last_modified", formatter.format(Date()))
        self.utils.add(self.index, "date_object_created",
                       self.params.getProperty("date_object_created"))
        self.params.setProperty(
            "date_object_modified",
            time.strftime("%Y-%m-%dT%H:%M:%SZ", time.localtime()))
        self.utils.add(self.index, "date_object_modified",
                       self.params.getProperty("date_object_modified"))

    def __basicData(self):
        self.utils.add(self.index, "repository_name",
                       self.params["repository.name"])
        self.utils.add(self.index, "repository_type",
                       self.params["repository.type"])
        if self.params["date_transitioned"] is not None:
            self.utils.add(self.index, "date_transitioned",
                           self.params["date_transitioned"])
        # VITAL integration
        vitalPid = self.params["vitalPid"]
        if vitalPid is not None:
            self.utils.add(self.index, "vitalPid", vitalPid)
        # Persistent Identifiers
        pidProperty = self.config.getString(None, ["curation", "pidProperty"])
        if pidProperty is None:
            self.log.error("No configuration found for persistent IDs!")
        else:
            pid = self.params[pidProperty]
            if pid is not None:
                self.utils.add(self.index, "known_ids", pid)
                self.utils.add(self.index, "pidProperty", pid)
                self.utils.add(self.index, "oai_identifier", pid)
        self.utils.add(self.index, "oai_set", "default")
        # Publication
        published = self.params["published"]
        if published is not None:
            self.utils.add(self.index, "published", "true")

    def __security(self):
        # Security
        roles = self.utils.getRolesWithAccess(self.oid)
        if roles is not None:
            # For every role currently with access
            for role in roles:
                # Empty roles should not show up, but a few appeared during debugging
                if role != "":
                    if role in self.item_security:
                        # They still have access
                        self.utils.add(self.index, "security_filter", role)
                    else:
                        # Their access has been revoked
                        self.__revokeRoleAccess(role)
            # Now for every role that the new step allows access
            for role in self.item_security:
                if role not in roles:
                    # Grant access if new
                    self.__grantRoleAccess(role)
                    self.utils.add(self.index, "security_filter", role)

        # No existing security
        else:
            if self.item_security is None:
                # Guest access if none provided so far
                self.__grantRoleAccess("guest")
                self.utils.add(self.index, "security_filter", role)
            else:
                # Otherwise use workflow security
                for role in self.item_security:
                    # Grant access if new
                    self.__grantRoleAccess(role)
                    self.utils.add(self.index, "security_filter", role)

        users = self.utils.getUsersWithAccess(self.oid)
        if users is not None:
            # For every role currently with access
            for user in users:
                self.utils.add(self.index, "security_exception", user)

        # Ownership
        if self.owner is None:
            self.utils.add(self.index, "owner", "system")
        else:
            self.utils.add(self.index, "owner", self.owner)

    def __indexList(self, name, values):
        # convert to set so no duplicate values
        for value in HashSet(values):
            self.utils.add(self.index, name, value)

    def __grantRoleAccess(self, newRole):
        schema = self.utils.getAccessSchema()
        schema.setRecordId(self.oid)
        schema.set("role", newRole)
        self.utils.setAccessSchema(schema)

    def __grantUserAccess(self, newUser):
        schema = self.utils.getAccessSchema()
        schema.setRecordId(self.oid)
        schema.set("user", newUser)
        self.utils.setAccessSchema(schema)

    def __revokeRoleAccess(self, oldRole):
        schema = self.utils.getAccessSchema()
        schema.setRecordId(self.oid)
        schema.set("role", oldRole)
        self.utils.removeAccessSchema(schema)

    def __revokeUserAccess(self, oldUser):
        schema = self.utils.getAccessSchema()
        schema.setRecordId(self.oid)
        schema.set("user", oldUser)
        self.utils.removeAccessSchema(schema)

    def __metadata(self):
        self.title = None
        self.dcType = None
        self.descriptionList = []
        self.creatorList = []
        self.creationDate = []
        self.contributorList = []
        self.approverList = []
        self.formatList = ["application/x-fascinator-package"]
        self.fulltext = []
        self.relationDict = {}
        self.customFields = {}
        self.creatorFullNameMap = HashMap()
        self.grantNumberList = []
        self.arrayBucket = HashMap()
        self.compFields = [
            "dc:coverage.vivo:DateTimeInterval", "locrel:prc.foaf:Person"
        ]
        self.compFieldsConfig = {
            "dc:coverage.vivo:DateTimeInterval": {
                "delim": " to ",
                "start": "start",
                "end": "end"
            },
            "locrel:prc.foaf:Person": {
                "delim": ", ",
                "start": "familyName",
                "end": "givenName"
            }
        }
        self.reportingFieldPrefix = "reporting_"
        self.embargoedDate = None
        self.createTimeStamp = None

        # Try our data sources, order matters
        self.__workflow()

        # Some defaults if the above failed
        if self.title is None:
            self.title = "New Dataset"
        if self.formatList == []:
            source = self.object.getPayload(self.packagePid)
            self.formatList.append(source.getContentType())

        # Index our metadata finally
        self.utils.add(self.index, "dc_title", self.title)
        if self.dcType is not None:
            self.utils.add(self.index, "dc_type", self.dcType)
        self.__indexList(
            "dc_creator",
            self.creatorList)  #no dc_author in schema.xml, need to check
        self.__indexList("dc_contributor", self.contributorList)
        self.__indexList("dc_description", self.descriptionList)
        self.__indexList("dc_format", self.formatList)
        self.__indexList("dc_date", self.creationDate)
        self.__indexList("full_text", self.fulltext)
        for key in self.customFields:
            self.__indexList(key, self.customFields[key])
        for key in self.relationDict:
            self.__indexList(key, self.relationDict[key])
        if self.arrayBucket.size() > 0:
            for arrFldName in self.arrayBucket.keySet():
                if arrFldName.endswith("Person") or arrFldName.replace(
                        self.reportingFieldPrefix, "") in self.compFields:
                    self.__indexList(arrFldName,
                                     self.arrayBucket.get(arrFldName).values())
                else:
                    self.__indexList(arrFldName,
                                     self.arrayBucket.get(arrFldName))
        if self.embargoedDate is not None:
            self.utils.add(self.index, "date_embargoed",
                           self.embargoedDate + "T00:00:00Z")
        if self.createTimeStamp is None:
            self.utils.add(
                self.index, "create_timestamp",
                time.strftime("%Y-%m-%dT%H:%M:%SZ", time.localtime()))

    def __workflow(self):
        # Workflow data
        WORKFLOW_ID = "dataset"
        wfChanged = False
        workflow_security = []
        self.message_list = None
        stages = self.config.getJsonSimpleList(["stages"])
        if self.owner == "guest":
            pageTitle = "Submission Request"
            displayType = "submission-request"
            initialStep = 0
        else:
            pageTitle = "Metadata Record"
            displayType = "package-dataset"
            initialStep = 1
        try:
            wfMeta = self.__getJsonPayload("workflow.metadata")
            wfMeta.getJsonObject().put("pageTitle", pageTitle)

            # Are we indexing because of a workflow progression?
            targetStep = wfMeta.getString(None, ["targetStep"])
            if targetStep is not None and targetStep != wfMeta.getString(
                    None, ["step"]):
                wfChanged = True
                # Step change
                wfMeta.getJsonObject().put("step", targetStep)
                wfMeta.getJsonObject().remove("targetStep")
            # This must be a re-index then
            else:
                targetStep = wfMeta.getString(None, ["step"])

            # Security change
            for stage in stages:
                if stage.getString(None, ["name"]) == targetStep:
                    wfMeta.getJsonObject().put(
                        "label", stage.getString(None, ["label"]))
                    self.item_security = stage.getStringList(["visibility"])
                    workflow_security = stage.getStringList(["security"])
                    if wfChanged == True:
                        self.message_list = stage.getStringList(["message"])
        except StorageException:
            # No workflow payload, time to create
            initialStage = stages.get(initialStep).getString(None, ["name"])
            wfChanged = True
            wfMeta = JsonSimple()
            wfMetaObj = wfMeta.getJsonObject()
            wfMetaObj.put("id", WORKFLOW_ID)
            wfMetaObj.put("step", initialStage)
            wfMetaObj.put("pageTitle", pageTitle)
            stages = self.config.getJsonSimpleList(["stages"])
            for stage in stages:
                if stage.getString(None, ["name"]) == initialStage:
                    wfMetaObj.put("label", stage.getString(None, ["label"]))
                    self.item_security = stage.getStringList(["visibility"])
                    workflow_security = stage.getStringList(["security"])
                    self.message_list = stage.getStringList(["message"])

        # Has the workflow metadata changed?
        if wfChanged == True:
            inStream = IOUtils.toInputStream(wfMeta.toString(True), "UTF-8")
            try:
                StorageUtils.createOrUpdatePayload(self.object,
                                                   "workflow.metadata",
                                                   inStream)
            except StorageException:
                print " ERROR updating dataset payload"

        # Form processing
        coreFields = [
            "title", "description", "manifest", "metaList", "relationships",
            "responses"
        ]
        formData = wfMeta.getObject(["formData"])
        if formData is not None:
            formData = JsonSimple(formData)
            # Core fields
            description = formData.getStringList(["description"])
            if description:
                self.descriptionList = description
            # Non-core fields
            data = formData.getJsonObject()
            for field in data.keySet():
                if field not in coreFields:
                    self.customFields[field] = formData.getStringList([field])

        # Manifest processing (formData not present in wfMeta)
        manifest = self.__getJsonPayload(self.packagePid)
        formTitles = manifest.getStringList(["title"])
        if formTitles:
            for formTitle in formTitles:
                if self.title is None:
                    self.title = formTitle
        self.descriptionList = [manifest.getString("", ["description"])]

        #Used to make sure we have a created date
        createdDateFlag = False

        formData = manifest.getJsonObject()

        for field in formData.keySet():
            if field not in coreFields:
                value = formData.get(field)
                if value is not None and value.strip() != "":
                    self.utils.add(self.index, field, value)
                    # We want to sort by date of creation, so it
                    # needs to be indexed as a date (ie. 'date_*')
                    if field == "dc:created":
                        parsedTime = time.strptime(value, "%Y-%m-%d")
                        solrTime = time.strftime("%Y-%m-%dT%H:%M:%SZ",
                                                 parsedTime)
                        self.utils.add(self.index, "date_created", solrTime)
                        self.log.debug("Set created date to :%s" % solrTime)
                        createdDateFlag = True
                    elif field == "redbox:embargo.dc:date":
                        self.embargoedDate = value
                    elif field == "create_timestamp":
                        self.createTimeStamp = value
                    # try to extract some common fields for faceting
                    if field.startswith("dc:") and \
                            not (field.endswith(".dc:identifier.rdf:PlainLiteral") \
                              or field.endswith(".dc:identifier") \
                              or field.endswith(".rdf:resource")):
                        # index dublin core fields for faceting
                        basicField = field.replace("dc:", "dc_")
                        dot = field.find(".")
                        if dot > 0:
                            facetField = basicField[:dot]
                        else:
                            facetField = basicField
                        #print "Indexing DC field '%s':'%s'" % (field, facetField)
                        if facetField == "dc_title":
                            if self.title is None:
                                self.title = value
                        elif facetField == "dc_type":
                            if self.dcType is None:
                                self.dcType = value
                        elif facetField == "dc_creator":
                            if basicField.endswith("foaf_name"):
                                self.utils.add(self.index, "dc_creator", value)
                        else:
                            self.utils.add(self.index, facetField, value)
                        # index keywords for lookup
                        if field.startswith("dc:subject.vivo:keyword."):
                            self.utils.add(self.index, "keywords", value)
                    # check if this is an array field
                    fnameparts = field.split(":")
                    if fnameparts is not None and len(fnameparts) >= 3:
                        if field.startswith("bibo") or field.startswith(
                                "skos"):
                            arrParts = fnameparts[1].split(".")
                        else:
                            arrParts = fnameparts[2].split(".")
                        # we're not interested in: Relationship, Type and some redbox:origin
                        if arrParts is not None and len(
                                arrParts) >= 2 and field.find(
                                    ":Relationship.") == -1 and field.find(
                                        "dc:type") == -1 and field.find(
                                            "redbox:origin"
                                        ) == -1 and arrParts[1].isdigit():
                            # we've got an array field
                            fldPart = ":%s" % arrParts[0]
                            prefixEndIdx = field.find(fldPart) + len(fldPart)
                            suffixStartIdx = prefixEndIdx + len(
                                arrParts[1]) + 1
                            arrFldName = self.reportingFieldPrefix + field[:prefixEndIdx] + field[
                                suffixStartIdx:]
                            if field.endswith("Name"):
                                arrFldName = self.reportingFieldPrefix + field[:
                                                                               prefixEndIdx]
                            self.log.debug(
                                "Array Field name is:%s  from: %s, with value:%s"
                                % (arrFldName, field, value))

                            if field.endswith("Name"):
                                fullFieldMap = self.arrayBucket.get(arrFldName)
                                if fullFieldMap is None:
                                    fullFieldMap = HashMap()
                                    self.arrayBucket.put(
                                        arrFldName, fullFieldMap)
                                idx = arrParts[1]
                                fullField = fullFieldMap.get(idx)
                                if (fullField is None):
                                    fullField = ""
                                if (field.endswith("givenName")):
                                    fullField = "%s, %s" % (fullField, value)
                                if (field.endswith("familyName")):
                                    fullField = "%s%s" % (value, fullField)
                                self.log.debug("fullname now is :%s" %
                                               fullField)
                                fullFieldMap.put(idx, fullField)
                            else:
                                fieldlist = self.arrayBucket.get(arrFldName)
                                if fieldlist is None:
                                    fieldlist = []
                                    self.arrayBucket.put(arrFldName, fieldlist)
                                fieldlist.append(value)

                    for compfield in self.compFields:
                        if field.startswith(compfield):
                            arrFldName = self.reportingFieldPrefix + compfield
                            fullFieldMap = self.arrayBucket.get(arrFldName)
                            if fullFieldMap is None:
                                fullFieldMap = HashMap()
                                self.arrayBucket.put(arrFldName, fullFieldMap)
                            fullField = fullFieldMap.get("1")
                            if fullField is None:
                                fullField = ""
                            if field.endswith(
                                    self.compFieldsConfig[compfield]["end"]):
                                fullField = "%s%s%s" % (
                                    fullField,
                                    self.compFieldsConfig[compfield]["delim"],
                                    value)
                            if field.endswith(
                                    self.compFieldsConfig[compfield]["start"]):
                                fullField = "%s%s" % (value, fullField)
                            self.log.debug("full field now is :%s" % fullField)
                            fullFieldMap.put("1", fullField)

        self.utils.add(self.index, "display_type", displayType)

        # Make sure we have a creation date
        if not createdDateFlag:
            self.utils.add(self.index, "date_created", self.last_modified)
            self.log.debug(
                "Forced creation date to %s because it was not explicitly set."
                % self.last_modified)

        # Workflow processing
        wfStep = wfMeta.getString(None, ["step"])
        self.utils.add(self.index, "workflow_id",
                       wfMeta.getString(None, ["id"]))
        self.utils.add(self.index, "workflow_step", wfStep)
        self.utils.add(self.index, "workflow_step_label",
                       wfMeta.getString(None, ["label"]))
        for group in workflow_security:
            self.utils.add(self.index, "workflow_security", group)
            if self.owner is not None:
                self.utils.add(self.index, "workflow_security", self.owner)
        # set OAI-PMH status to deleted
        if wfStep == "retired":
            self.utils.add(self.index, "oai_deleted", "true")

    def __messages(self):
        if self.message_list is not None and len(self.message_list) > 0:
            msg = JsonSimple()
            msg.getJsonObject().put("oid", self.oid)
            message = msg.toString()
            for target in self.message_list:
                self.utils.sendMessage(target, message)

    def __getJsonPayload(self, pid):
        payload = self.object.getPayload(pid)
        json = self.utils.getJsonObject(payload.open())
        payload.close()
        return json
コード例 #44
0
def start_pr_release(proj_name,
                     repo_name,
                     pr_number,
                     pr_title,
                     comment,
                     source_hash,
                     target_hash,
                     tag='pull_request_merger'):
    pr_templates = templateApi.getTemplates(tag)
    if not pr_templates:
        raise Exception(
            'Could not find any templates by tag [%s]. '
            'Did the xlr-development-workflow-plugin initializer run?' % tag)
    else:
        if len(pr_templates) > 1:
            logger.warn(
                "Found more than one template with tag '%s', using the first one"
                % tag)
        template_id = pr_templates[0].id

    params = StartRelease()
    params.setReleaseTitle('Pull Request #%s: %s' % (pr_number, pr_title))
    variables = HashMap()
    variables.put('${pull_request_number}', '%s' % pr_number)
    variables.put('${pull_request_title}', '%s' % pr_title)
    variables.put('${repository_name}', '%s' % repo_name)
    variables.put('${pull_request_comment}', '%s' % comment)
    variables.put('${proj_name}', '%s' % proj_name)
    variables.put('${source_hash}', '%s' % source_hash)
    variables.put('${target_hash}', '%s' % target_hash)
    params.setReleaseVariables(variables)
    started_release = templateApi.start(template_id, params)
    response.entity = started_release
    logger.info("Started release %s for Pull Request %s" %
                (started_release.getId(), pr_number))
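A hedged invocation sketch for the function above; the argument values are placeholders, and the surrounding XL Release scripting environment (templateApi, StartRelease, response, logger) is assumed to be provided by the platform:

start_pr_release(proj_name='demo-project',          # placeholder
                 repo_name='demo-repo',             # placeholder
                 pr_number=42,
                 pr_title='Fix flaky integration test',
                 comment='Merging after review',
                 source_hash='0a1b2c3',
                 target_hash='4d5e6f7')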
コード例 #45
0
    def __build_headers(self):
        from java.util import HashMap
        m = HashMap()
        m.put("Accept", "application/json")
        m.put("Content-Type", "application/json")
        return m
コード例 #46
0
    def authenticate(self, configurationAttributes, requestParameters, step):
        identity = CdiUtil.bean(Identity)
        userService = CdiUtil.bean(UserService)
        authenticationService = CdiUtil.bean(AuthenticationService)

        mapUserDeployment = False
        enrollUserDeployment = False
        if (configurationAttributes.containsKey("gplus_deployment_type")):
            deploymentType = StringHelper.toLowerCase(configurationAttributes.get("gplus_deployment_type").getValue2())
            
            if (StringHelper.equalsIgnoreCase(deploymentType, "map")):
                mapUserDeployment = True
            if (StringHelper.equalsIgnoreCase(deploymentType, "enroll")):
                enrollUserDeployment = True

        if (step == 1):
            print "Google+ Authenticate for step 1"
 
            gplusAuthCodeArray = requestParameters.get("gplus_auth_code")
            gplusAuthCode = gplusAuthCodeArray[0]

            # Check if user uses basic method to log in
            useBasicAuth = False
            if (StringHelper.isEmptyString(gplusAuthCode)):
                useBasicAuth = True

            # Use basic method to log in
            if (useBasicAuth):
                print "Google+ Authenticate for step 1. Basic authentication"
        
                identity.setWorkingParameter("gplus_count_login_steps", 1)
        
                credentials = identity.getCredentials()

                userName = credentials.getUsername()
                userPassword = credentials.getPassword()
        
                loggedIn = False
                if (StringHelper.isNotEmptyString(userName) and StringHelper.isNotEmptyString(userPassword)):
                    userService = CdiUtil.bean(UserService)
                    loggedIn = authenticationService.authenticate(userName, userPassword)
        
                if (not loggedIn):
                    return False
        
                return True

            # Use Google+ method to log in
            print "Google+ Authenticate for step 1. gplusAuthCode:", gplusAuthCode

            currentClientSecrets = self.getCurrentClientSecrets(self.clientSecrets, configurationAttributes, requestParameters)
            if (currentClientSecrets == None):
                print "Google+ Authenticate for step 1. Client secrets configuration is invalid"
                return False
            
            print "Google+ Authenticate for step 1. Attempting to gets tokens"
            tokenResponse = self.getTokensByCode(self.clientSecrets, configurationAttributes, gplusAuthCode)
            if ((tokenResponse == None) or (tokenResponse.getIdToken() == None) or (tokenResponse.getAccessToken() == None)):
                print "Google+ Authenticate for step 1. Failed to get tokens"
                return False
            else:
                print "Google+ Authenticate for step 1. Successfully gets tokens"

            jwt = Jwt.parse(tokenResponse.getIdToken())
            # TODO: Validate ID Token Signature  

            gplusUserUid = jwt.getClaims().getClaimAsString(JwtClaimName.SUBJECT_IDENTIFIER)
            print "Google+ Authenticate for step 1. Found Google user ID in the ID token: '%s'" % gplusUserUid
            
            if (mapUserDeployment):
                # Use mapping to local IDP user
                print "Google+ Authenticate for step 1. Attempting to find user by oxExternalUid: 'gplus:%s'" % gplusUserUid

                # Check if there is a user with the specified gplusUserUid
                foundUser = userService.getUserByAttribute("oxExternalUid", "gplus:" + gplusUserUid)

                if (foundUser == None):
                    print "Google+ Authenticate for step 1. Failed to find user"
                    print "Google+ Authenticate for step 1. Setting count steps to 2"
                    identity.setWorkingParameter("gplus_count_login_steps", 2)
                    identity.setWorkingParameter("gplus_user_uid", gplusUserUid)
                    return True

                foundUserName = foundUser.getUserId()
                print "Google+ Authenticate for step 1. foundUserName: '******'" % foundUserName
                
                userAuthenticated = authenticationService.authenticate(foundUserName)
                if (userAuthenticated == False):
                    print "Google+ Authenticate for step 1. Failed to authenticate user"
                    return False
            
                print "Google+ Authenticate for step 1. Setting count steps to 1"
                identity.setWorkingParameter("gplus_count_login_steps", 1)

                postLoginResult = self.extensionPostLogin(configurationAttributes, foundUser)
                print "Google+ Authenticate for step 1. postLoginResult: '%s'" % postLoginResult

                return postLoginResult
            elif (enrollUserDeployment):
                # Use auto enrollment to local IDP
                print "Google+ Authenticate for step 1. Attempting to find user by oxExternalUid: 'gplus:%s'" % gplusUserUid
 
                # Check if there is a user with the specified gplusUserUid
                foundUser = userService.getUserByAttribute("oxExternalUid", "gplus:" + gplusUserUid)
 
                if (foundUser == None):
                    # Auto user enrollment
                    print "Google+ Authenticate for step 1. There is no user in LDAP. Adding user to local LDAP"

                    print "Google+ Authenticate for step 1. Attempting to gets user info"
                    userInfoResponse = self.getUserInfo(currentClientSecrets, configurationAttributes, tokenResponse.getAccessToken())
                    if ((userInfoResponse == None) or (userInfoResponse.getClaims().size() == 0)):
                        print "Google+ Authenticate for step 1. Failed to get user info"
                        return False
                    else:
                        print "Google+ Authenticate for step 1. Successfully gets user info"
                    
                    gplusResponseAttributes = userInfoResponse.getClaims()
 
                    # Convert Google+ user claims to lower case
                    gplusResponseNormalizedAttributes = HashMap()
                    for gplusResponseAttributeEntry in gplusResponseAttributes.entrySet():
                        gplusResponseNormalizedAttributes.put(
                            StringHelper.toLowerCase(gplusResponseAttributeEntry.getKey()), gplusResponseAttributeEntry.getValue())
 
                    currentAttributesMapping = self.getCurrentAttributesMapping(self.attributesMapping, configurationAttributes, requestParameters)
                    print "Google+ Authenticate for step 1. Using next attributes mapping '%s'" % currentAttributesMapping
 
                    newUser = User()
                    for attributesMappingEntry in currentAttributesMapping.entrySet():
                        remoteAttribute = attributesMappingEntry.getKey()
                        localAttribute = attributesMappingEntry.getValue()
 
                        localAttributeValue = gplusResponseNormalizedAttributes.get(remoteAttribute)
                        if (localAttribute != None):
                            newUser.setAttribute(localAttribute, localAttributeValue)
 
                    if (newUser.getAttribute("sn") == None):
                        newUser.setAttribute("sn", gplusUserUid)
 
                    if (newUser.getAttribute("cn") == None):
                        newUser.setAttribute("cn", gplusUserUid)

                    # Add mail to oxTrustEmail so that the user's
                    # email is available through the SCIM interface
                    # too.
                    if (newUser.getAttribute("oxTrustEmail") is None and
                        newUser.getAttribute("mail") is not None):
                        oxTrustEmail = {
                            "value": newUser.getAttribute("mail"),
                            "display": newUser.getAttribute("mail"),
                            "primary": True,
                            "operation": None,
                            "reference": None,
                            "type": "other"
                        }
                        newUser.setAttribute("oxTrustEmail", json.dumps(oxTrustEmail))

                    newUser.setAttribute("oxExternalUid", "gplus:" + gplusUserUid)
                    print "Google+ Authenticate for step 1. Attempting to add user '%s' with next attributes '%s'" % (gplusUserUid, newUser.getCustomAttributes())
 
                    foundUser = userService.addUser(newUser, True)
                    print "Google+ Authenticate for step 1. Added new user with UID: '%s'" % foundUser.getUserId()

                foundUserName = foundUser.getUserId()
                print "Google+ Authenticate for step 1. foundUserName: '******'" % foundUserName

                userAuthenticated = authenticationService.authenticate(foundUserName)
                if (userAuthenticated == False):
                    print "Google+ Authenticate for step 1. Failed to authenticate user"
                    return False

                print "Google+ Authenticate for step 1. Setting count steps to 1"
                identity.setWorkingParameter("gplus_count_login_steps", 1)

                print "Google+ Authenticate for step 1. Attempting to run extension postLogin"
                postLoginResult = self.extensionPostLogin(configurationAttributes, foundUser)
                print "Google+ Authenticate for step 1. postLoginResult: '%s'" % postLoginResult

                return postLoginResult
            else:
                # Check if there is a user with the specified gplusUserUid
                print "Google+ Authenticate for step 1. Attempting to find user by uid: '%s'" % gplusUserUid

                foundUser = userService.getUser(gplusUserUid)
                if (foundUser == None):
                    print "Google+ Authenticate for step 1. Failed to find user"
                    return False

                foundUserName = foundUser.getUserId()
                print "Google+ Authenticate for step 1. foundUserName: '******'" % foundUserName

                userAuthenticated = authenticationService.authenticate(foundUserName)
                if (userAuthenticated == False):
                    print "Google+ Authenticate for step 1. Failed to authenticate user"
                    return False

                print "Google+ Authenticate for step 1. Setting count steps to 1"
                identity.setWorkingParameter("gplus_count_login_steps", 1)

                postLoginResult = self.extensionPostLogin(configurationAttributes, foundUser)
                print "Google+ Authenticate for step 1. postLoginResult: '%s'" % postLoginResult

                return postLoginResult
        elif (step == 2):
            print "Google+ Authenticate for step 2"
            
            sessionAttributes = identity.getSessionId().getSessionAttributes()
            if (sessionAttributes == None) or not sessionAttributes.containsKey("gplus_user_uid"):
                print "Google+ Authenticate for step 2. gplus_user_uid is empty"
                return False

            gplusUserUid = sessionAttributes.get("gplus_user_uid")
            passed_step1 = StringHelper.isNotEmptyString(gplusUserUid)
            if (not passed_step1):
                return False

            identity = CdiUtil.bean(Identity)
            credentials = identity.getCredentials()

            userName = credentials.getUsername()
            userPassword = credentials.getPassword()

            loggedIn = False
            if (StringHelper.isNotEmptyString(userName) and StringHelper.isNotEmptyString(userPassword)):
                loggedIn = authenticationService.authenticate(userName, userPassword)

            if (not loggedIn):
                return False

            # Check if there is a user which has gplusUserUid
            # Avoid mapping Google account to more than one IDP account
            foundUser = userService.getUserByAttribute("oxExternalUid", "gplus:" + gplusUserUid)

            if (foundUser == None):
                # Add gplusUserUid to the user's oxExternalUid values
                foundUser = userService.addUserAttribute(userName, "oxExternalUid", "gplus:" + gplusUserUid)
                if (foundUser == None):
                    print "Google+ Authenticate for step 2. Failed to update current user"
                    return False

                postLoginResult = self.extensionPostLogin(configurationAttributes, foundUser)
                print "Google+ Authenticate for step 2. postLoginResult: '%s'" % postLoginResult

                return postLoginResult
            else:
                foundUserName = foundUser.getUserId()
                print "Google+ Authenticate for step 2. foundUserName: '******'" % foundUserName
    
                if StringHelper.equals(userName, foundUserName):
                    postLoginResult = self.extensionPostLogin(configurationAttributes, foundUser)
                    print "Google+ Authenticate for step 2. postLoginResult: '%s'" % postLoginResult
    
                    return postLoginResult
        
            return False
        else:
            return False
コード例 #47
0
def make(net, preName='pre', postName='post', rate=5e-4):

    # get pre and post ensembles from their names
    pre = net.network.getNode(preName)
    post = net.network.getNode(postName)

    dim_pre = pre.getDimension()
    dim_post = post.getDimension()

    t = [[0] * dim_pre for i in range(dim_post)]
    index_pre = range(dim_pre)
    index_post = range(dim_post)
    for i in range(max(len(index_pre), len(index_post))):
        ipre = index_pre[i % len(index_pre)]
        ipost = index_post[i % len(index_post)]
        t[ipost][ipre] = 1

    decoder = pre.getOrigin('X').getDecoders()
    encoder = post.getEncoders()
    encoder = MU.prod(encoder, 1.0 / post.getRadii()[0])
    weight = MU.prod(encoder, MU.prod(t, MU.transpose(decoder)))

    # random weight matrix to initialize projection from pre to post
    # def rand_weights(w):
    #     for i in range(len(w)):
    #         for j in range(len(w[0])):
    #             w[i][j] = random.uniform(-1e-3,1e-3)
    #     return w
    # weight = rand_weights(numeric.zeros((post.neurons, pre.neurons)).tolist())

    # non-decoded termination (to learn transformation)
    count = 0
    prename = pre.getName()
    while '%s_%02d' % (prename, count) in [t.name for t in post.terminations]:
        count = count + 1
    prename = '%s_%02d' % (prename, count)

    post.addBCMTermination(prename, weight, 0.005, False, None)

    # Add projections
    net.connect(pre.getOrigin('AXON'), post.getTermination(prename))

    # Set learning rule on the non-decoded termination
    net.learn(post, prename, None, rate=rate)

    if net.network.getMetaData("bcmterm") == None:
        net.network.setMetaData("bcmterm", HashMap())
    bcmterms = net.network.getMetaData("bcmterm")

    bcmterm = HashMap(4)
    bcmterm.put("preName", preName)
    bcmterm.put("postName", postName)
    bcmterm.put("rate", rate)

    bcmterms.put(prename, bcmterm)

    if net.network.getMetaData("templates") == None:
        net.network.setMetaData("templates", ArrayList())
    templates = net.network.getMetaData("templates")
    templates.add(prename)

    if net.network.getMetaData("templateProjections") == None:
        net.network.setMetaData("templateProjections", HashMap())
    templateproj = net.network.getMetaData("templateProjections")
    templateproj.put(preName, postName)
コード例 #48
0
    def authenticate(self, configurationAttributes, requestParameters, step):
        identity = CdiUtil.bean(Identity)
        credentials = identity.getCredentials()

        userService = CdiUtil.bean(UserService)
        requestParameterService = CdiUtil.bean(RequestParameterService)
        authenticationService = CdiUtil.bean(AuthenticationService)
        httpService = CdiUtil.bean(HttpService)

        if step == 1:
            print "CAS2. Authenticate for step 1"
            ticket_array = requestParameters.get("ticket")
            if ArrayHelper.isEmpty(ticket_array):
                print "CAS2. Authenticate for step 1. ticket is empty"
                return False

            ticket = ticket_array[0]
            print "CAS2. Authenticate for step 1. ticket: " + ticket

            if StringHelper.isEmptyString(ticket):
                print "CAS2. Authenticate for step 1. ticket is invalid"
                return False

            # Validate ticket
            facesContext = CdiUtil.bean(FacesContext)
            request = facesContext.getExternalContext().getRequest()

            parametersMap = HashMap()
            parametersMap.put(
                "service",
                httpService.constructServerUrl(request) + "/postlogin.htm")
            if self.cas_renew_opt:
                parametersMap.put("renew", "true")
            parametersMap.put("ticket", ticket)
            cas_service_request_uri = requestParameterService.parametersAsString(
                parametersMap)
            cas_service_request_uri = self.cas_host + "/serviceValidate?" + cas_service_request_uri
            if self.cas_extra_opts != None:
                cas_service_request_uri = cas_service_request_uri + "&" + self.cas_extra_opts

            print "CAS2. Authenticate for step 1. cas_service_request_uri: " + cas_service_request_uri

            http_client = httpService.getHttpsClient()
            http_service_response = httpService.executeGet(
                http_client, cas_service_request_uri)
            try:
                validation_content = httpService.convertEntityToString(
                    httpService.getResponseContent(
                        http_service_response.getHttpResponse()))
            finally:
                http_service_response.closeConnection()

            print "CAS2. Authenticate for step 1. validation_content: " + validation_content
            if StringHelper.isEmpty(validation_content):
                print "CAS2. Authenticate for step 1. Ticket validation response is invalid"
                return False

            cas2_auth_failure = self.parse_tag(validation_content,
                                               "cas:authenticationFailure")
            print "CAS2. Authenticate for step 1. cas2_auth_failure: ", cas2_auth_failure

            cas2_user_uid = self.parse_tag(validation_content, "cas:user")
            print "CAS2. Authenticate for step 1. cas2_user_uid: ", cas2_user_uid

            if (cas2_auth_failure != None) or (cas2_user_uid == None):
                print "CAS2. Authenticate for step 1. Ticket is invalid"
                return False

            if self.cas_map_user:
                print "CAS2. Authenticate for step 1. Attempting to find user by oxExternalUid: cas2:" + cas2_user_uid

                # Check if there is a user with the specified cas2_user_uid
                find_user_by_uid = userService.getUserByAttribute(
                    "oxExternalUid", "cas2:" + cas2_user_uid)

                if find_user_by_uid == None:
                    print "CAS2. Authenticate for step 1. Failed to find user"
                    print "CAS2. Authenticate for step 1. Setting count steps to 2"
                    identity.setWorkingParameter("cas2_count_login_steps", 2)
                    identity.setWorkingParameter("cas2_user_uid",
                                                 cas2_user_uid)
                    return True

                found_user_name = find_user_by_uid.getUserId()
                print "CAS2. Authenticate for step 1. found_user_name: " + found_user_name

                authenticationService.authenticate(found_user_name)

                print "CAS2. Authenticate for step 1. Setting count steps to 1"
                identity.setWorkingParameter("cas2_count_login_steps", 1)

                return True
            else:
                print "CAS2. Authenticate for step 1. Attempting to find user by uid:" + cas2_user_uid

                # Check if there is a user with the specified cas2_user_uid
                find_user_by_uid = userService.getUser(cas2_user_uid)
                if find_user_by_uid == None:
                    print "CAS2. Authenticate for step 1. Failed to find user"
                    return False

                found_user_name = find_user_by_uid.getUserId()
                print "CAS2. Authenticate for step 1. found_user_name: " + found_user_name

                authenticationService.authenticate(found_user_name)

                print "CAS2. Authenticate for step 1. Setting count steps to 1"
                identity.setWorkingParameter("cas2_count_login_steps", 1)

                return True
        elif step == 2:
            print "CAS2. Authenticate for step 2"

            if identity.isSetWorkingParameter("cas2_user_uid"):
                print "CAS2. Authenticate for step 2. cas2_user_uid is empty"
                return False

            cas2_user_uid = identity.getWorkingParameter("cas2_user_uid")
            passed_step1 = StringHelper.isNotEmptyString(cas2_user_uid)
            if not passed_step1:
                return False

            user_name = credentials.getUsername()
            user_password = credentials.getPassword()

            logged_in = False
            if StringHelper.isNotEmptyString(
                    user_name) and StringHelper.isNotEmptyString(
                        user_password):
                logged_in = authenticationService.authenticate(
                    user_name, user_password)

            if not logged_in:
                return False

            # Check if there is a user which has cas2_user_uid
            # Avoid mapping CAS2 account to more than one IDP account
            find_user_by_uid = userService.getUserByAttribute(
                "oxExternalUid", "cas2:" + cas2_user_uid)

            if find_user_by_uid == None:
                # Add cas2_user_uid to the user's oxExternalUid values
                find_user_by_uid = userService.addUserAttribute(
                    user_name, "oxExternalUid", "cas2:" + cas2_user_uid)
                if find_user_by_uid == None:
                    print "CAS2. Authenticate for step 2. Failed to update current user"
                    return False

                return True
            else:
                found_user_name = find_user_by_uid.getUserId()
                print "CAS2. Authenticate for step 2. found_user_name: " + found_user_name

                if StringHelper.equals(user_name, found_user_name):
                    return True

            return False
        else:
            return False
コード例 #49
0
 def genernate_gongzuoshi_content(self, webpart):
     cache_key = "sbj" + str(self.subject.subjectId) + "_" + str(webpart.getSubjectWebpartId()) + self.cacheKeyFix
     content = cache.get(cache_key)
     if content != None:
         request.setAttribute(cache_key, content)
         return
     
     map = HashMap()
     new_blog_list = self.get_new_list(4)
     map.put("new_blog_list", new_blog_list)
     
     hot_blog_list = self.get_hot_list(4)
     map.put("hot_blog_list", hot_blog_list)        
    
     rcmd_blog_list = self.get_rcmd_list(4)
     map.put("rcmd_blog_list", rcmd_blog_list)
     
     map.put("subject", self.subject)
     map.put("SubjectRootUrl", self.subjectRootUrl)
     map.put("webpart", webpart)
     map.put("unitId", self.unitId)
     content = self.templateProcessor.processTemplate(map, "/WEB-INF/subjectpage/" + self.templateName + "/gongzuoshi.ftl", "utf-8")
     request.setAttribute(cache_key, content)
     cache.put(cache_key, content)
     
     #print "qry.metaSubjectId:", qry.metaSubjectId
     #print "qry.metaGradeId:", qry.metaGradeId
     self.statService.subjectStat(self.metaSubjectId, self.metaGradeId, self.metaGradeId + 1000)
コード例 #50
0
(options, args) = parser.parse_args(args=sys.argv[1:])

if len(args) != 2:
    parser.print_usage()
    sys.exit(1)

xmlDir = args[0]
outDir = args[1]

mesh = Mesh()
MeshReader.readObject3D(mesh, xmlDir)
liaison = MeshLiaison(mesh)
if options.immutable_border:
    liaison.mesh.tagFreeEdges(AbstractHalfEdge.IMMUTABLE)
if options.coplanarity:
    liaison.getMesh().buildRidges(options.coplanarity)
if options.preserveGroups:
    liaison.getMesh().buildGroupBoundaries()

opts = HashMap()
opts.put("coplanarity", str(options.coplanarity))
if options.min_quality_factor:
    opts.put("minQualityFactor", str(options.min_quality_factor))
if options.no_expect_insert:
    opts.put("expectInsert", "false")
sm = SwapEdge(liaison, opts)
sm.setProgressBarStatus(10000)
sm.compute()

MeshWriter.writeObject3D(liaison.getMesh(), outDir, String())
コード例 #51
0
 def genernate_xiezuozu_content(self, webpart):
     cache_key = "sbj" + str(self.subject.subjectId) + "_" + str(webpart.getSubjectWebpartId()) + self.cacheKeyFix
     content = cache.get(cache_key)
     if content != None:
         request.setAttribute(cache_key, content)
         return
     
     map = HashMap()
     qry = GroupQuery("""  g.groupName,g.groupId, g.groupIcon, g.groupTitle, g.createDate, g.groupIntroduce """)
     qry.subjectId = self.metaSubjectId
     qry.gradeId = self.metaGradeId
     new_group_list = qry.query_map(4)
     map.put("new_group_list", new_group_list)
     
     qry = GroupQuery("""  g.groupName,g.groupIcon, g.createDate, g.groupId, g.groupTitle, g.groupIntroduce """)
     qry.subjectId = self.metaSubjectId
     qry.gradeId = self.metaGradeId
     qry.orderType = 8
     hot_group_list = qry.query_map(4)
     map.put("hot_group_list", hot_group_list)
     
     qry = GroupQuery("""  g.groupName,g.groupId, g.groupIcon, g.groupTitle, g.createDate, g.groupIntroduce """)
     qry.subjectId = self.metaSubjectId
     qry.gradeId = self.metaGradeId
     qry.isRecommend = True
     rcmd_group_list = qry.query_map(4)
     map.put("rcmd_group_list", rcmd_group_list)   
 
     map.put("subject", self.subject)
     map.put("SubjectRootUrl", self.subjectRootUrl)
     map.put("webpart", webpart)
     map.put("unitId", self.unitId)
     content = self.templateProcessor.processTemplate(map, "/WEB-INF/subjectpage/" + self.templateName + "/xiezuozu.ftl", "utf-8")
     request.setAttribute(cache_key, content)
     cache.put(cache_key, content)
コード例 #52
0
ファイル: refine.py プロジェクト: ikanaris/jCAE-1
	liaison.getMesh().getTrace().createMesh("mesh", liaison.getMesh())
if options.immutable_border:
	liaison.mesh.tagFreeEdges(AbstractHalfEdge.IMMUTABLE)
if options.coplanarity:
	liaison.getMesh().buildRidges(options.coplanarity)
if options.preserveGroups:
	liaison.getMesh().buildGroupBoundaries()

if options.recordFile:
	cmds = [ String("assert self.m.checkNoDegeneratedTriangles()"), String("assert self.m.checkNoInvertedTriangles()"), String("assert self.m.checkVertexLinks()"), String("assert self.m.isValid()") ]
	liaison.getMesh().getTrace().setHooks(cmds)

opts = HashMap()
setAnalytic = False
if options.size:
	opts.put("size", str(options.size))
elif options.metricsFile:
	opts.put("metricsFile", options.metricsFile)
else:
	setAnalytic = True
if options.coplanarity:
	opts.put("coplanarity", str(options.coplanarity))
if options.project:
	opts.put("project", "true")
if options.allowNearNodes:
	opts.put("allowNearNodes", "true")
if options.features:
	opts.put("features", "true")
opts.put("nearLengthRatio", str(options.nearLengthRatio))

if options.decimateSize or options.decimateTarget:
コード例 #53
0
 def genernate_resoure_content(self, webpart):
     cache_key = "sbj" + str(self.subject.subjectId) + "_" + str(webpart.getSubjectWebpartId()) + self.cacheKeyFix
     content = cache.get(cache_key)
     if content != None:
         request.setAttribute(cache_key, content)
         return
     
     map = HashMap()        
     qry = ResourceQuery(""" r.resourceId, r.title, r.href, r.createDate, r.fsize, r.downloadCount, 
     u.loginName, u.nickName, r.subjectId as subjectId, grad.gradeName, sc.name as scName """)
     qry.subjectId = self.metaSubjectId
     qry.gradeId = self.metaGradeId
     qry.FuzzyMatch = True
     if self.unitId != None and self.unitId != 0:
         qry.custormAndWhereClause = " r.approvedPathInfo LIKE '%/" + str(self.unitId) + "/%'"
     new_resource_list = qry.query_map(10)
     map.put("new_resource_list", new_resource_list)
     
     qry = ResourceQuery(""" r.resourceId, r.title, r.href, r.createDate, r.fsize, r.downloadCount, 
     u.loginName, u.nickName, msubj.msubjName, grad.gradeName, sc.name as scName """)
     qry.orderType = 4       # downloadCount DESC
     qry.subjectId = self.metaSubjectId
     qry.gradeId = self.metaGradeId
     qry.FuzzyMatch = True
     if self.unitId != None and self.unitId != 0:
         qry.custormAndWhereClause = " r.approvedPathInfo LIKE '%/" + str(self.unitId) + "/%'"
     hot_resource_list = qry.query_map(10)
     map.put("hot_resource_list", hot_resource_list)
     
     #hot_resource_list = self.viewcount_svc.getViewCountListShared(12,7,10,self.unit.unitPath,self.unit.unitDepth);
     #map.put("hot_resource_list", hot_resource_list)
     
     # Recommended resources
     qry = ResourceQuery(""" r.resourceId, r.title, r.href, r.createDate, r.fsize, r.downloadCount, 
     u.loginName, u.nickName, msubj.msubjName, grad.gradeName, sc.name as scName """)
     qry.rcmdState = True
     qry.subjectId = self.metaSubjectId
     qry.gradeId = self.metaGradeId
     qry.FuzzyMatch = True
     if self.unitId != None and self.unitId != 0:
         qry.custormAndWhereClause = " r.rcmdPathInfo LIKE '%/" + str(self.unitId) + "/%'"
     rcmd_resource_list = qry.query_map(10)
     map.put("rcmd_resource_list", rcmd_resource_list)    
    
     map.put("subject", self.subject)
     map.put("SubjectRootUrl", self.subjectRootUrl)
     map.put("webpart", webpart)
     map.put("unitId", self.unitId)
     content = self.templateProcessor.processTemplate(map, "/WEB-INF/subjectpage/" + self.templateName + "/resource.ftl", "utf-8")
     request.setAttribute(cache_key, content)
     cache.put(cache_key, content)
コード例 #54
0
    def getJdbcResources(self, env, jdbcOshMap, globalJdbcResources=None):
        if env is None:
            return
        jdbcResources = HashMap()

        resources = env.getChildren('Resource')
        for resource in resources:
            name = resource.getAttributeValue('name')
            dsType = resource.getAttributeValue('type')
            driverClassName = resource.getAttributeValue('driverClassName')
            url = resource.getAttributeValue('url')
            maxActive = resource.getAttributeValue('maxActive')
            logger.debug('Found jdbc datasource ', name, ' driver ',
                         str(driverClassName), ' url ', str(url))
            jdbcResources.put(
                name,
                JdbcResource(name, dsType, driverClassName, url, maxActive))

        for resource in resources:
            name = resource.getAttributeValue('name')
            if name is None:
                continue
            # do not read additional parameters for non-existing resource
            jdbcResource = jdbcResources.get(name)
            if jdbcResource is None:
                continue

            # update existing JDBC resource with absent parameters data
            for resourceParamsEl in env.getChildren('ResourceParams'):
                if resourceParamsEl.getAttributeValue('name') == name:

                    resourceParams = self.getResourceParamsValues(
                        resourceParamsEl)
                    dsType = resourceParams.get('type')
                    if (dsType is not None) and (jdbcResource.type is None):
                        jdbcResource.type = dsType

                    driverClassName = resourceParams.get('driverClassName')
                    if (driverClassName is not None) and (
                            jdbcResource.driverClass is None):
                        jdbcResource.driverClass = driverClassName

                    url = resourceParams.get('url')
                    if (url is not None) and (jdbcResource.url is None):
                        jdbcResource.url = url

                    maxActive = resourceParams.get('maxActive')
                    if (maxActive
                            is not None) and (jdbcResource.maxActive is None):
                        jdbcResource.maxActive = maxActive

                    if jdbcResource.type != 'javax.sql.DataSource':
                        jdbcResources.remove(name)

        resources = env.getChildren('ResourceLink')
        for resource in resources:
            name = resource.getAttributeValue('name')
            globalName = resource.getAttributeValue('global')
            dsType = resource.getAttributeValue('type')
            logger.debug('Found resource link ', name, ' for global name ',
                         globalName, ' of type ', dsType)
            if dsType != 'javax.sql.DataSource':
                continue
            jdbcResource = None
            if globalJdbcResources is not None:
                jdbcResource = globalJdbcResources.get(globalName)
            if jdbcResource is None:
                continue
            logger.debug('Found jdbc datastore with global name ', globalName)
            jdbcOshMap.put(name, jdbcResource)

        dnsResolver = _DnsResolverDecorator(netutils.JavaDnsResolver(),
                                            self.destinationIp)
        reporter = jdbcModule.DnsEnabledJdbcTopologyReporter(
            jdbcModule.DataSourceBuilder(), dnsResolver)

        class Container:
            def __init__(self, osh):
                self.osh = osh

            def getOsh(self):
                return self.osh

        container = Container(self.tomcatOsh)
        for jdbc in jdbcResources.values():
            datasource = jdbcModule.Datasource(jdbc.name,
                                               jdbc.url,
                                               driverClass=jdbc.driverClass)
            self.OSHVResult.addAll(
                reporter.reportDatasources(container, datasource))
            jdbcOshMap.put(jdbc.name, datasource.getOsh())
コード例 #55
0
 def genernate_article_content(self, webpart):
     cache_key = "sbj" + str(self.subject.subjectId) + "_" + str(webpart.getSubjectWebpartId()) + self.cacheKeyFix
     content = cache.get(cache_key)
     if content != None:
         request.setAttribute(cache_key, content)
         return
     
     map = HashMap()
     qry = ArticleQuery("a.articleId, a.title, a.createDate, a.typeState, a.userId, a.loginName, a.userTrueName")
     qry.subjectId = self.metaSubjectId
     qry.gradeId = self.metaGradeId
     qry.FuzzyMatch = True
     if self.unitId != None and self.unitId != 0:
         qry.custormAndWhereClause = " a.approvedPathInfo LIKE '%/" + str(self.unitId) + "/%'"
         
     newest_article_list = qry.query_map(10)
     map.put("newest_article_list", newest_article_list)        
     #hot_article_list=self.viewcount_svc.getViewCountListShared(3,7,10,self.unit.unitPath,self.unit.unitDepth);
     #map.put("hot_article_list",hot_article_list)
     
     qry = ArticleQuery(""" a.articleId, a.title, a.createDate,a.typeState, a.userId, a.loginName, a.userTrueName """)
     qry.subjectId = self.metaSubjectId
     qry.gradeId = self.metaGradeId
     qry.FuzzyMatch = True
     qry.orderType = 2
     if self.unitId != None and self.unitId != 0:
         qry.custormAndWhereClause = " a.approvedPathInfo LIKE '%/" + str(self.unitId) + "/%'"
     hot_article_list = qry.query_map(10)
     map.put("hot_article_list", hot_article_list)        
     
     qry = ArticleQuery(""" a.articleId, a.title, a.createDate, a.typeState, a.userId, a.loginName, a.userTrueName """)
     qry.rcmdState = True
     qry.subjectId = self.metaSubjectId
     qry.gradeId = self.metaGradeId
     qry.FuzzyMatch = True
     if self.unitId != None and self.unitId != 0:
         qry.custormAndWhereClause = " a.rcmdPathInfo LIKE '%/" + str(self.unitId) + "/%'"
        
     rcmd_article_list = qry.query_map(10)
     map.put("rcmd_article_list", rcmd_article_list)
     
     map.put("subject", self.subject)
     map.put("webpart", webpart)
     map.put("unitId", self.unitId)
     map.put("SubjectRootUrl", self.subjectRootUrl)
     content = self.templateProcessor.processTemplate(map, "/WEB-INF/subjectpage/" + self.templateName + "/article.ftl", "utf-8")
     request.setAttribute(cache_key, content)
     cache.put(cache_key, content)
コード例 #56
0
 def execute(self):
     if self.subject == None:
         self.addActionError("Object not found!")
         return self.ERROR
     cacheCount = cache.get("cacheCount")
     print "cacheCount",cacheCount
     if cacheCount == None:
         timerCountService = __spring__.getBean("timerCountService")
         timerCountService.doSubjectCount(self.subject)
         self.subjectService.clearCacheData()
         self.subject = self.subjectService.getSubjectById(self.subject.subjectId)        
     
     shortcutTarget = self.subject.shortcutTarget
     if shortcutTarget != None:
         response.sendRedirect(shortcutTarget)
         return
     
     if self.unitId != None and self.unitId != 0:
         self.cacheKeyFix = "_" + str(self.unitId)
     
     
     self.templateName = self.subject.templateName        
     if self.templateName == None or self.templateName == "":
         self.templateName = "template1"
     
     theme = self.params.safeGetStringParam("theme")
     if theme != "":
         request.setAttribute("theme", theme)
     webpartList = self.subjectService.getSubjectWebpartList(self.subject.subjectId, True)
     if self.params.existParam("tm") == False:
         if len(webpartList) < 1:
             self.genWebparts()
             self.addSubjectNav()
             response.sendRedirect("?tm=1")
             return
     
     
     for webpart in webpartList:
         self.set_webpart_flag(webpart)
         if webpart.moduleName == SubjectWebpart.WEBPART_MODULENAME_ARTICLE:
            self.genernate_article_content(webpart)
         elif webpart.moduleName == SubjectWebpart.WEBPART_MODULENAME_RESOURCE:
             self.genernate_resoure_content(webpart)
         elif webpart.moduleName == SubjectWebpart.WEBPART_MODULENAME_JIAOYANSHIPIN:
             self.genernate_jiaoyanshipin_content(webpart)
         elif webpart.moduleName == SubjectWebpart.WEBPART_MODULENAME_PICNEWS:
             self.genernate_picnews_content(webpart)
         elif webpart.moduleName == SubjectWebpart.WEBPART_MODULENAME_NEWS:
             self.genernate_news_content(webpart)
         elif webpart.moduleName == SubjectWebpart.WEBPART_MODULENAME_NOTICE:
             self.genernate_notice_content(webpart)
         elif webpart.moduleName == SubjectWebpart.WEBPART_MODULENAME_LINKS:
             self.genernate_links_content(webpart)
         elif webpart.moduleName == SubjectWebpart.WEBPART_MODULENAME_STATISTICS:
             self.genernate_statistics_content(webpart)
         elif webpart.moduleName == SubjectWebpart.WEBPART_MODULENAME_VOTE:
             self.genernate_vote_content(webpart)
         elif webpart.moduleName == SubjectWebpart.WEBPART_MODULENAME_JIAOYANYUAN:
             self.genernate_jiaoyanyuan_content(webpart)
         elif webpart.moduleName == SubjectWebpart.WEBPART_MODULENAME_MINGSHI:
             self.genernate_mingshi_content(webpart)
         elif webpart.moduleName == SubjectWebpart.WEBPART_NODULENAME_DAITOUREN:
             self.genernate_daitouren_content(webpart)
         elif webpart.moduleName == SubjectWebpart.WEBPART_MODULENAME_GONGZUOSHI:
             self.genernate_gongzuoshi_content(webpart)
         elif webpart.moduleName == SubjectWebpart.WEBPART_MODULENAME_XIEZUOZU:
             self.genernate_xiezuozu_content(webpart)
         elif webpart.moduleName == SubjectWebpart.WEBPART_MODULENAME_JIAOYANHUODONG:
             self.genernate_jiaoyanhuodong_content(webpart)
         elif webpart.moduleName == SubjectWebpart.WEBPART_MODULENAME_JIAOYANZHUANTI:
             self.genernate_jiaoyanzhuanti_content(webpart)
         elif webpart.moduleName == SubjectWebpart.WEBPART_MODULENAME_WENDA:
             self.genernate_wenda_content(webpart)
         elif webpart.moduleName == SubjectWebpart.WEBPART_MODULENAME_TOPIC:
             self.genernate_topic_content(webpart)
         else:
             cache_key = "sbj" + str(self.subject.subjectId) + "_" + str(webpart.getSubjectWebpartId()) + self.cacheKeyFix
             content = cache.get(cache_key)
             if content != None:
                 request.setAttribute(cache_key, content)
             else:
                 map = HashMap()
                 map.put("subject", self.subject.subjectId)
                 map.put("webpart", webpart)
                 map.put("unitId", self.unitId)
                 map.put("SubjectRootUrl", self.subjectRootUrl)
                 content = self.templateProcessor.processTemplate(map, "/WEB-INF/subjectpage/" + self.templateName + "/custorm.ftl", "utf-8")
                 request.setAttribute(cache_key, content)
                 cache.put(cache_key, content)
     
     theme = self.params.safeGetStringParam("theme")
     request.setAttribute("head_nav", "subject")
     request.setAttribute("subject", self.subject)
     request.setAttribute("webpartList", webpartList)
     if theme != "":
         request.setAttribute("theme", theme)
     
     if self.loginUser != None:
         request.setAttribute("loginUser", self.loginUser)
         preview = self.params.safeGetStringParam("preview")
         if self.isAdmin() == True and preview != "":
             request.setAttribute("role", "admin")        
     
     request.setAttribute("unitId", self.unitId)
     request.setAttribute("req", request)
     return "/WEB-INF/subjectpage/" + self.templateName + "/index.ftl"
コード例 #57
0
ctorMsg.put("function", "init_work_order")
ctorMsg.put("args", args)
params = JSONObject()
params.put("ctorMsg", ctorMsg)
obj = JSONObject()
obj.put('method', 'invoke')
obj.put('params', params)
jsonStr = obj.serialize(True)

FixedLoggers.MAXIMOLOGGER.info("json obj")
FixedLoggers.MAXIMOLOGGER.info(obj)

# post json to chaincode
handler = HTTPHandler()
map = HashMap()
map.put("URL", url + "/api/chaincode")
map.put("HTTPMETHOD", "POST")
map.put("body", jsonStr)
map.put("headers", "Content-Type: application/json")

# init HTTP Client, post JSON to blockchain server
client = DefaultHttpClient()
request = HttpPost(url + "/api/chaincode")
request.addHeader(HttpHeaders.CONTENT_TYPE, "application/json")
request.addHeader(HttpHeaders.ACCEPT, "application/json")
entity = StringEntity(jsonStr, "UTF-8")
request.setEntity(entity)
response = client.execute(request)
status = response.getStatusLine().getStatusCode()

# location = mboSet.getString("LOCATION")
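
# A hedged follow-up sketch (not in the original snippet): reading the chaincode
# response body with Apache HttpClient's EntityUtils, assuming org.apache.http.util
# is available on the classpath alongside DefaultHttpClient.
from org.apache.http.util import EntityUtils

# Convert the response entity to a string and log it alongside the HTTP status
responseBody = EntityUtils.toString(response.getEntity(), "UTF-8")
FixedLoggers.MAXIMOLOGGER.info("chaincode response status: " + str(status))
FixedLoggers.MAXIMOLOGGER.info(responseBody)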
コード例 #58
0
    def authenticate(self, configurationAttributes, requestParameters, step):
        context = Contexts.getEventContext()
        authenticationService = AuthenticationService.instance()
        userService = UserService.instance()
        httpService = HttpService.instance()

        cas_host = configurationAttributes.get("cas_host").getValue2()
        cas_map_user = StringHelper.toBoolean(
            configurationAttributes.get("cas_map_user").getValue2(), False)
        cas_renew_opt = StringHelper.toBoolean(
            configurationAttributes.get("cas_renew_opt").getValue2(), False)

        cas_extra_opts = None
        if (configurationAttributes.containsKey("cas_extra_opts")):
            cas_extra_opts = configurationAttributes.get(
                "cas_extra_opts").getValue2()

        if (step == 1):
            print "CAS2. Authenticate for step 1"
            ticket_array = requestParameters.get("ticket")
            if ArrayHelper.isEmpty(ticket_array):
                print "CAS2. Authenticate for step 1. ticket is empty"
                return False

            ticket = ticket_array[0]
            print "CAS2. Authenticate for step 1. ticket: " + ticket

            if (StringHelper.isEmptyString(ticket)):
                print "CAS2. Authenticate for step 1. ticket is invalid"
                return False

            # Validate ticket
            request = FacesContext.getCurrentInstance().getExternalContext(
            ).getRequest()

            parametersMap = HashMap()
            parametersMap.put(
                "service",
                httpService.constructServerUrl(request) + "/postlogin")
            if (cas_renew_opt):
                parametersMap.put("renew", "true")
            parametersMap.put("ticket", ticket)
            cas_service_request_uri = authenticationService.parametersAsString(
                parametersMap)
            cas_service_request_uri = cas_host + "/serviceValidate?" + cas_service_request_uri
            if (cas_extra_opts != None):
                cas_service_request_uri = cas_service_request_uri + "&" + cas_extra_opts

            print "CAS2. Authenticate for step 1. cas_service_request_uri: " + cas_service_request_uri

            http_client = httpService.getHttpsClient()
            http_service_response = httpService.executeGet(
                http_client, cas_service_request_uri)

            try:
                validation_content = httpService.convertEntityToString(
                    httpService.getResponseContent(
                        http_service_response.getHttpResponse()))
            finally:
                http_service_response.closeConnection()

            print "CAS2. Authenticate for step 1. validation_content: " + validation_content
            if StringHelper.isEmpty(validation_content):
                print "CAS2. Authenticate for step 1. Ticket validation response is invalid"
                return False

            cas2_auth_failure = self.parse_tag(validation_content,
                                               "cas:authenticationFailure")
            print "CAS2. Authenticate for step 1. cas2_auth_failure: ", cas2_auth_failure

            cas2_user_uid = self.parse_tag(validation_content, "cas:user")
            print "CAS2. Authenticate for step 1. cas2_user_uid: ", cas2_user_uid

            if ((cas2_auth_failure != None) or (cas2_user_uid == None)):
                print "CAS2. Authenticate for step 1. Ticket is invalid"
                return False

            if (cas_map_user):
                print "CAS2. Authenticate for step 1. Attempting to find user by oxExternalUid: cas2:" + cas2_user_uid

                # Check if there is a user with the specified cas2_user_uid
                find_user_by_uid = userService.getUserByAttribute(
                    "oxExternalUid", "cas2:" + cas2_user_uid)

                if (find_user_by_uid == None):
                    print "CAS2. Authenticate for step 1. Failed to find user"
                    print "CAS2. Authenticate for step 1. Setting count steps to 2"
                    context.set("cas2_count_login_steps", 2)
                    context.set("cas2_user_uid", cas2_user_uid)
                    return True

                found_user_name = find_user_by_uid.getUserId()
                print "CAS2. Authenticate for step 1. found_user_name: " + found_user_name

                credentials = Identity.instance().getCredentials()
                credentials.setUsername(found_user_name)
                credentials.setUser(find_user_by_uid)

                print "CAS2. Authenticate for step 1. Setting count steps to 1"
                context.set("cas2_count_login_steps", 1)

                return True
            else:
                print "CAS2. Authenticate for step 1. Attempting to find user by uid:" + cas2_user_uid

                # Check if there is a user with the specified cas2_user_uid
                find_user_by_uid = userService.getUser(cas2_user_uid)
                if (find_user_by_uid == None):
                    print "CAS2. Authenticate for step 1. Failed to find user"
                    return False

                found_user_name = find_user_by_uid.getUserId()
                print "CAS2. Authenticate for step 1. found_user_name: " + found_user_name

                credentials = Identity.instance().getCredentials()
                credentials.setUsername(found_user_name)
                credentials.setUser(find_user_by_uid)

                print "CAS2. Authenticate for step 1. Setting count steps to 1"
                context.set("cas2_count_login_steps", 1)

                return True
        elif (step == 2):
            print "CAS2. Authenticate for step 2"

            sessionAttributes = context.get("sessionAttributes")
            if (sessionAttributes == None
                ) or not sessionAttributes.containsKey("cas2_user_uid"):
                print "CAS2. Authenticate for step 2. cas2_user_uid is empty"
                return False

            cas2_user_uid = sessionAttributes.get("cas2_user_uid")
            passed_step1 = StringHelper.isNotEmptyString(cas2_user_uid)
            if (not passed_step1):
                return False

            credentials = Identity.instance().getCredentials()
            user_name = credentials.getUsername()
            user_password = credentials.getPassword()

            logged_in = False
            if (StringHelper.isNotEmptyString(user_name)
                    and StringHelper.isNotEmptyString(user_password)):
                logged_in = userService.authenticate(user_name, user_password)

            if (not logged_in):
                return False

            # Check if there is a user which has cas2_user_uid
            # Avoid mapping CAS2 account to more than one IDP account
            find_user_by_uid = userService.getUserByAttribute(
                "oxExternalUid", "cas2:" + cas2_user_uid)

            if (find_user_by_uid == None):
                # Add cas2_user_uid to the user's oxExternalUid values
                find_user_by_uid = userService.addUserAttribute(
                    user_name, "oxExternalUid", "cas2:" + cas2_user_uid)
                if (find_user_by_uid == None):
                    print "CAS2. Authenticate for step 2. Failed to update current user"
                    return False

                return True
            else:
                found_user_name = find_user_by_uid.getUserId()
                print "CAS2. Authenticate for step 2. found_user_name: " + found_user_name

                if StringHelper.equals(user_name, found_user_name):
                    return True

            return False
        else:
            return False
コード例 #59
0
def make(net,
         errName='error',
         N_err=50,
         preName='pre',
         postName='post',
         rate=5e-4,
         supervisionRatio=0.5,
         theta=None,
         weight=None):

    # get pre and post ensembles from their names
    pre = net.network.getNode(preName)
    post = net.network.getNode(postName)

    # Create error ensemble
    try:
        net.get(errName)  # check if it already exists
    except StructuralException:
        net.make(errName, N_err, post.dimension)

    # modulatory termination (find unused termination)
    count = 0
    while 'mod_%02d' % count in [t.name for t in post.terminations]:
        count = count + 1
    modname = 'mod_%02d' % count
    if net.get(errName).dimension == post.dimension:
        modweights = numeric.eye(post.dimension)
    else:
        modweights = [[1 for i in range(net.get(errName).dimension)]
                      for j in range(post.dimension)]
    mterm = post.addDecodedTermination(modname, modweights, 0.005, True)

    # random weight matrix to initialize projection from pre to post
    if weight == None:

        def rand_weights(w):
            for i in range(len(w)):
                for j in range(len(w[0])):
                    w[i][j] = random.uniform(-1e-3, 1e-3)
            return w

        weight = rand_weights(
            numeric.zeros((post.neurons, pre.neurons)).tolist())

    # non-decoded termination (to learn transformation)
    count = 0
    prename = pre.getName()
    while '%s_%02d' % (prename, count) in [t.name for t in post.terminations]:
        count = count + 1
    prename = '%s_%02d' % (prename, count)

    lterm = post.addHPESTermination(prename, weight, 0.005, False, theta)

    # Add projections
    net.connect(errName, post.getTermination(modname))
    net.connect(pre.getOrigin('AXON'), post.getTermination(prename))

    # Set learning rule on the non-decoded termination
    net.learn(post,
              prename,
              modname,
              rate=rate,
              supervisionRatio=supervisionRatio)

    if net.network.getMetaData("learnedterm") == None:
        net.network.setMetaData("learnedterm", HashMap())
    learnedterms = net.network.getMetaData("learnedterm")

    learnedterm = HashMap(7)
    learnedterm.put("errName", errName)
    learnedterm.put("N_err", N_err)
    learnedterm.put("preName", preName)
    learnedterm.put("postName", postName)
    learnedterm.put("rate", rate)
    learnedterm.put("theta", theta)
    learnedterm.put("supervisionRatio", supervisionRatio)

    learnedterms.put(errName, learnedterm)

    if net.network.getMetaData("templates") == None:
        net.network.setMetaData("templates", ArrayList())
    templates = net.network.getMetaData("templates")
    templates.add(errName)

    if net.network.getMetaData("templateProjections") == None:
        net.network.setMetaData("templateProjections", HashMap())
    templateproj = net.network.getMetaData("templateProjections")
    templateproj.put(errName, postName)
    templateproj.put(preName, postName)

    return lterm, mterm
コード例 #60
0
 def to_map(p_dict):
     map = HashMap()
     for key, value in p_dict.items():
         map.put(key, value)
     return map
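
# A brief usage sketch (hypothetical values, not from the original source) showing
# to_map converting a Python dict into a java.util.HashMap; assumes HashMap has
# already been imported from java.util as in the snippets above.
headers = to_map({"Accept": "application/json", "Content-Type": "application/json"})
print headers.get("Accept")  # -> application/json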