예제 #1
0
 def __init__(self, data=None):
     """Create a resolver, optionally seeded with an existing
     pattern -> config mapping (copied into our own map)."""
     #list of keys is maintained in order to ensure the
     #predictable iteration order
     self.configsMap = LinkedHashMap()
     if data:
         self.configsMap.putAll(data)
     self.initDefaultConfigs()
예제 #2
0
def dictCheck(dictionary, value, subvalue, configName):
    """Validate ``dictionary`` and convert it to a java LinkedHashMap.

    Every key must be a str and every value must be of type ``value``.
    When ``value`` is list/tuple, each element must be of type ``subvalue``
    and is copied into a java ArrayList; ints/floats are boxed as
    java.lang.Integer/Float so Java callers receive the expected types.

    Raises TypeError (mentioning ``configName``) on any type mismatch.
    """
    # Renamed from 'map' to avoid shadowing the builtin.
    javaMap = LinkedHashMap()
    if type(dictionary) == dict:
        for k in dictionary.keys():
            if type(k) != str:
                # Py2/Py3-compatible raise + repr() (was: raise T, msg / `k`)
                raise TypeError(configName + " key [" + repr(k) + "] not a str")
            l = dictionary[k]
            if type(l) != value:
                raise TypeError(configName + " value [" + repr(l) + "] not a "
                                + repr(value))
            if value == list or value == tuple:
                n = ArrayList()
                for m in l:
                    if type(m) != subvalue:
                        raise TypeError(configName + " value [" + repr(l)
                                        + "] isn't a " + repr(subvalue)
                                        + ": " + repr(m))
                    elif subvalue == int:
                        n.add(Integer(m))   # box as java.lang.Integer
                    elif subvalue == float:
                        n.add(Float(m))     # box as java.lang.Float
                    else:
                        n.add(m)
                javaMap.put(k, n)
            else:
                if value == int:
                    javaMap.put(k, Integer(l))
                elif value == float:
                    javaMap.put(k, Float(l))
                else:
                    javaMap.put(k, l)
    else:
        raise TypeError(configName + " not a dict:" + repr(dictionary))
    return javaMap
예제 #3
0
class ErrorResolver:
    """Resolves error messages to error configs.

    Patterns are regexes checked with re.search (case-insensitive,
    multiline); the first pattern that matches, in insertion order, wins.
    """

    def __init__(self, data=None):
        """Optionally seed with an existing pattern -> config mapping."""
        #list of keys is maintained in order to ensure the
        #predictable iteration order
        self.configsMap = LinkedHashMap()
        if data:
            self.configsMap.putAll(data)
        self.initDefaultConfigs()

    def addConfig(self, pattern, config):
        # Later lookups scan patterns in insertion order.
        self.configsMap.put(pattern, config)

    def hasConfig(self, msg):
        # Returns 1 when some pattern matches msg; otherwise falls through
        # and implicitly returns None (falsy).
        iterator = self.configsMap.keySet().iterator()
        while iterator.hasNext():
            pattern = iterator.next()
            if re.search(pattern, msg, re.I | re.M):
                return 1

    def getConfig(self, msg):
        # First matching pattern wins; None when nothing matches.
        iterator = self.configsMap.keySet().iterator()
        while iterator.hasNext():
            pattern = iterator.next()
            if re.search(pattern, msg, re.I | re.M):
                return self.configsMap.get(pattern)

    def getDefaultConfig(self):
        return self.defaultConfig

    def copy(self):
        # Shallow copy: the new resolver shares the config objects.
        return ErrorResolver(self.configsMap)

    def __len__(self):
        return self.configsMap.size()

    def __getitem__(self, key):
        return self.configsMap.get(key)

    def __setitem__(self, key, item):
        self.addConfig(key, item)

    def keys(self):
        return self.configsMap.keySet()

    def initDefaultConfigs(self):
        # Fallback configs used when no pattern matches; the constants come
        # from elsewhere in this module.
        self.defaultConfigWithDetails = ErrorMessageConfig(
            ERROR_GENERIC_WITH_DETAILS,
            errorcodes.INTERNAL_ERROR_WITH_PROTOCOL_DETAILS)
        self.defaultConfig = ErrorMessageConfig(ERROR_CONNECTION_FAILED,
                                                errorcodes.CONNECTION_FAILED)
        self.defaultConfigNoProtocol = ErrorMessageConfig(
            ERROR_CONNECTION_FAILED_NO_PROTOCOL,
            errorcodes.CONNECTION_FAILED_NO_PROTOCOL)
        self.defaultConfigNoProtocolWithDetails = ErrorMessageConfig(
            ERROR_CONNECTION_FAILED_NO_PROTOCOL_WITH_DETAILS,
            errorcodes.CONNECTION_FAILED_NO_PROTOCOL_WITH_DETAILS)
    def getSuggestedNames(self):
        """Search the index for author records whose titles match common
        forms of this package's author name, and return them grouped by
        author name (LinkedHashMap of name -> ArrayList of docs).

        Also records the maximum search score in self.__maxScore
        (floored at 1.0).
        """
        # search common forms
        lookupNames = []
        surname = self.__metadata.getList("surname").get(0)
        firstName = self.__metadata.getList("firstName").get(0)
        firstInitial = firstName[0].upper()
        secondName = self.__metadata.getList("secondName")
        if not secondName.isEmpty():
            secondName = secondName.get(0)
        if secondName and secondName != "":
            secondInitial = secondName[0].upper()
            lookupNames.append("%s, %s. %s." % (surname, firstInitial, secondInitial))
            lookupNames.append("%s, %s %s." % (surname, firstName, secondInitial))
            lookupNames.append("%s, %s %s" % (surname, firstName, secondName))
            lookupNames.append("%s %s %s" % (firstName, secondName, surname))
        lookupNames.append("%s, %s." % (surname, firstInitial))
        lookupNames.append("%s, %s" % (surname, firstName))
        lookupNames.append("%s %s" % (firstName, surname))
        # Joined fragments form: dc_title:"A" OR dc_title:"B" OR ...
        query = '" OR dc_title:"'.join(lookupNames)

        # general word search from each part of the name
        parts = [p for p in self.getPackageTitle().split(" ") if len(p) > 0]
        query2 = " OR dc_title:".join(parts)

        # Exact-form matches are boosted (^2.5) over word matches (^0.5).
        req = SearchRequest('(dc_title:"%s")^2.5 OR (dc_title:%s)^0.5' % (query, query2))
        self.log.info("suggestedNames query={}", req.query)
        req.setParam("fq", 'recordtype:"author"')
        req.addParam("fq", 'item_type:"object"')
        req.setParam("rows", "9999")
        req.setParam("fl", "score")
        req.setParam("sort", "score desc")

        # Make sure 'fq' has already been set in the session
        ##security_roles = self.authentication.get_roles_list();
        ##security_query = 'security_filter:("' + '" OR "'.join(security_roles) + '")'
        ##req.addParam("fq", security_query)

        out = ByteArrayOutputStream()
        indexer = self.services.getIndexer()
        indexer.search(req, out)
        result = JsonConfigHelper(ByteArrayInputStream(out.toByteArray()))

        # self.log.info("result={}", result.toString())
        docs = result.getJsonList("response/docs")

        # Group result docs by author name, preserving result order.
        map = LinkedHashMap()
        for doc in docs:
            authorName = doc.getList("dc_title").get(0)
            if map.containsKey(authorName):
                authorDocs = map.get(authorName)
            else:
                authorDocs = ArrayList()
                map.put(authorName, authorDocs)
            authorDocs.add(doc)

        self.__maxScore = max(1.0, float(result.get("response/maxScore")))

        return map
예제 #5
0
 def getFacetCounts(self, key):
     """Return an insertion-ordered map of facet value -> count for the
     given facet field, keeping only entries with a positive count."""
     counts = LinkedHashMap()
     entries = self.__result.getList("facet_counts/facet_fields/%s" % key)
     # The list alternates: name, count, name, count, ...
     index = 0
     while index < len(entries):
         facetName = entries[index]
         facetCount = entries[index + 1]
         if facetCount > 0:
             counts.put(facetName, facetCount)
         index += 2
     return counts
예제 #6
0
 def getFacetCounts(self, key):
     """Return facet value -> count (insertion-ordered), skipping zero
     counts; values containing '/' are kept only when facets are
     currently selected."""
     results = LinkedHashMap()
     flat = self.__result.getList("facet_counts/facet_fields/%s" % key)
     # Alternating name/count pairs; consume two entries per step.
     pos = 0
     while pos < len(flat):
         name = flat[pos]
         count = flat[pos + 1]
         # Same short-circuit order as the original: '/' test runs first.
         if (name.find("/") == -1 or self.hasSelectedFacets()) and count > 0:
             results.put(name, count)
         pos += 2
     return results
예제 #7
0
 def getFacetCounts(self, key):
     """Collect facet value counts for ``key`` into an insertion-ordered
     map, dropping entries whose count is not positive."""
     facetMap = LinkedHashMap()
     raw = self.__result.getList("facet_counts/facet_fields/%s" % key)
     # Entries arrive as alternating name/count pairs.
     for pos in range(0, len(raw), 2):
         if raw[pos + 1] > 0:
             facetMap.put(raw[pos], raw[pos + 1])
     return facetMap
예제 #8
0
def pyDictToJavaMap(pyDict):
    """Convert a Python dict to a Java map, converting every key and value
    with pyValToJavaObj.

    An OrderedDict becomes a LinkedHashMap (preserving iteration order);
    any other dict becomes a HashMap.  Returns None for None input,
    consistent with the other pyDictToJavaMap variant in this module
    (previously this raised a TypeError when iterating None).
    """
    if pyDict is None:
        return None

    if isinstance(pyDict, OrderedDict):
        jmap = LinkedHashMap()
    else:
        jmap = HashMap()

    for key in pyDict:
        jmap.put(pyValToJavaObj(key), pyValToJavaObj(pyDict[key]))
    return jmap
예제 #9
0
 def getFacetCounts(self, key):
     """Return the facet counts for ``key`` from session navigation data,
     or an empty insertion-ordered map when navigation is disabled, facet
     data is missing, or the key is unknown."""
     if not self.__useSessionNavigation:
         return LinkedHashMap()
     facetData = self.__result.getFacets()
     # Missing data and an unknown key both yield an empty map; the
     # short-circuit preserves the original None check before containsKey.
     if facetData is None or not facetData.containsKey(key):
         return LinkedHashMap()
     return facetData.get(key).values()
예제 #10
0
def parseSat(satdata):
    """Convert SATDATA entries ((directory, name) tuples) into an
    insertion-ordered java map of directory -> name.

    check() validates each tuple's shape and element types; any entry that
    is not a tuple raises SyntaxError.
    """
    rval = LinkedHashMap()
    for e in satdata:
        if type(e) is types.TupleType:
            direct, name = check(e, (str, str),
                                 "Format error in SATDATA entry", satdata)
            rval.put(direct, name)
        else:
            # Py2/Py3-compatible raise + repr() (was: raise S, ... + ` satdata `)
            raise SyntaxError("Invalid syntax for SATDATA" + repr(satdata))
    return rval
예제 #11
0
def parseSat(satdirs):
    """Convert SATDIRS entries ((directory, name) tuples) into an
    insertion-ordered java map of directory -> name.

    check() validates each tuple; non-tuple entries raise SyntaxError.
    """
    rval = LinkedHashMap()
    for e in satdirs:
        if type(e) is types.TupleType:
            direct, name = check(e, (str, str),
              "Format error in SATDIRS entry", satdirs)
            rval.put(direct, name)
        else:
            # Python 2 raise/backtick-repr syntax (file targets Jython/Py2).
            raise SyntaxError, "Invalid syntax for SATDIRS" + `satdirs`
    return rval
예제 #12
0
def dictCheck(dictionary, value, subvalue, configName):
    """Validate ``dictionary`` (str keys, values of type ``value``; list/
    tuple elements of type ``subvalue``) and convert it to a java
    LinkedHashMap, boxing ints/floats as java.lang.Integer/Float.

    Raises TypeError (mentioning ``configName``) on any type mismatch.
    """
    # NOTE(review): 'map' shadows the builtin name.
    map = LinkedHashMap()
    if type(dictionary) == dict:
        for k in dictionary.keys():
            if type(k) != str:
                raise TypeError, configName + " key [" + `k` + "] not a str"
            l = dictionary[k]
            if type(l) != value:
                raise TypeError, configName + " value [" + `l` + "] not a " \
                  + `value`
            if value == list or value == tuple:
                # Sequence values are copied element-by-element into an
                # ArrayList with per-element type checking and boxing.
                n = ArrayList()
                for m in l:
                    if type(m) != subvalue:
                        raise TypeError, configName + " value [" + `l` \
                          + "] isn't a " + `subvalue` + ": " + `m`
                    elif subvalue == int:
                        n.add(Integer(m))
                    elif subvalue == float:
                        n.add(Float(m))
                    else:
                        n.add(m)
                map.put(k, n)
            else:
                if value == int:
                    map.put(k,Integer(l))
                elif value == float:
                    map.put(k,Float(l))
                else:
                    map.put(k, l)
    else:
        raise TypeError, configName + " not a dict:" + `dictionary`
    return map
예제 #13
0
 def __activate__(self, context):
     """Cache the uploader workflows available to the current user.

     A workflow qualifies when it defines an upload template and shares at
     least one security role with the user's roles.
     """
     self.roles = context["page"].authentication.get_roles_list()
     self.config = context["systemConfig"]
     workflows = JsonSimple.toJavaMap(self.config.getObject(["uploader"]))
     self.uploaders = LinkedHashMap()

     for workflow in workflows.keySet():
         if workflows.get(workflow).getString("", ["upload-template"]):
             for role in workflows.get(workflow).getArray(["security"]):
                 if str(role) in self.roles:
                     self.uploaders.put(workflow, workflows.get(workflow))
예제 #14
0
 def getDbUsers(self,sqlServer):
     """Read server login accounts and return them as an insertion-ordered
     map of login name -> 'dbuser' ObjectStateHolder parented to
     sqlServer."""
     rs = self.connection.getTable(Queries.SERVER_USERS)
     users = LinkedHashMap()
     while rs.next():
         login = rs.getString('loginname').strip()
         status = rs.getString('status')
         createDate= rs.getTimestamp('createdate')
         user = ObjectStateHolder('dbuser')
         user.setAttribute(Queries.DATA_NAME,login)
         user.setAttribute('dbuser_created',createDate)
         user.setAttribute('dbuser_accountstatus',status)
         user.setContainer(sqlServer)
         users.put(login,user)
     rs.close()
     return users
예제 #15
0
def pyDictToJavaMap(pyDict):
    '''
    Convert a Python dict into a Java map, converting every key and value
    with pyValToJavaObj.  An OrderedDict becomes a LinkedHashMap (keeping
    iteration order); any other dict becomes a HashMap.  None passes
    through unchanged.

    Going forward should use pyValToJavaObj instead.
    '''
    if pyDict is None:
        return None

    jmap = LinkedHashMap() if isinstance(pyDict, OrderedDict) else HashMap()
    for key, value in pyDict.items():
        jmap.put(pyValToJavaObj(key), pyValToJavaObj(value))
    return jmap
예제 #16
0
 def getDatabases(self,root):
     """Query master..sysdatabases and return an insertion-ordered map of
     database name -> 'sqldatabase' ObjectStateHolder parented to root."""
     result = LinkedHashMap()
     rs = self.connection.getTable("SELECT name,crdate as createDate FROM master..sysdatabases")
     logger.debug('in get databases for root: ', root.toString())
     while rs.next():
         database = ObjectStateHolder("sqldatabase")
         database.setContainer(root)
         dbName = rs.getString("name")
         createDate = rs.getTimestamp("createDate")
         if createDate:
             # Stored both as a GMT-normalised date and as the raw timestamp.
             database.setAttribute("createdate", Util.getSqlDateInGMT(createDate.getTime()))
             database.setAttribute("created_at", createDate)
         database.setAttribute(Queries.DATA_NAME,dbName)
         result.put(dbName,database)
     rs.close()
     return result
예제 #17
0
 def collectData(self,hostId,sqlServerId, discoverConfigs = 1):
     """Collect SQL Server topology (server properties, databases, stored
     procedures, configuration, users, processes, cluster data) into an
     ObjectStateHolderVector.  Failures are logged and partial results are
     still returned."""
     self.connection.open()
     oshv = ObjectStateHolderVector()
     try:
         oshv.add(self.getServerProperties(sqlServerId,hostId))
         dbMap = self.getDatabases(sqlServerId)
         #get the databases
         oshv.addAll(self.sqlDataBaseProps.getDatabases(dbMap,hostId,discoverConfigs))
         oshv.addAll(self.sqlDataBaseProps.getStoredProcedures(dbMap))
         #get the server configuration:
         logger.debug('discovering configs')
         try:
             oshv.add(self.sqlServerConfig.getServerConfiguration(sqlServerId))
             oshv.add(self.sqlServerConfig.getServerStartup(sqlServerId))
             self.sqlServerConfig.discoverPlans(oshv,sqlServerId,dbMap)
         except:
             # Best-effort: config discovery failures are logged, not fatal.
             logger.debugException(hostId.toString())
         if self.discoveryOptions and self.discoveryOptions.discoverDbUser:
             users = self.getDbUsers(sqlServerId)
             Util.addFromMap(users,oshv)
         else:
             # User discovery disabled: downstream calls get an empty map.
             users = LinkedHashMap()
         oshv.addAll(self.getProcesses(hostId,sqlServerId,dbMap,users))
         oshv.addAll(self.clusterConfiguration.collectData(sqlServerId))
         #db configuration:
         oshv.addAll(self.getDbConf(dbMap,hostId,users))
         logger.debug("sql db result for hostid:"+hostId.toString())
     except:
         # Best-effort: log and still return whatever was collected.
         logger.debugException(hostId.toString())
     self.connection.close()
     return oshv
예제 #18
0
def onInit():
    """Initialise the shared 'running' map (one AtomicBoolean flag per
    rule) and clear the testStatus variable."""
    global running
    running = Collections.synchronizedMap(LinkedHashMap())
    for ruleName in ("Rule1", "Rule2", "Rule3", "Rule4"):
        running.put(ruleName, AtomicBoolean(False))
    sponge.setVariable("testStatus", None)
예제 #19
0
def d2dParse(d2dmodels):
    """Build an insertion-ordered map of D2D model name -> GFE model name
    from D2DMODELS entries.  A (d2d, gfe) tuple maps the pair; a bare
    string maps the model name to itself.  Anything else raises
    SyntaxError."""
    # NOTE(review): 'dict' shadows the builtin name.
    dict = LinkedHashMap()
    for entry in d2dmodels:
        if type(entry) is types.TupleType:
            d2dModelName, gfeModelName = check(entry, (str, str),
              "Format error in D2DMODELS entry", d2dmodels)

            dict.put(d2dModelName, gfeModelName)

        elif type(entry) is types.StringType:
            d2dModelName = entry
            dict.put(d2dModelName, d2dModelName)

        else:
            raise SyntaxError, "Invalid syntax for D2DMODELS" + `d2dmodels`

    return dict
예제 #20
0
def random_tick():
    """Build one synthetic stock tick as an insertion-ordered map: fixed
    symbol 'AAPL', a random integer price 0-17 boxed as a Double, and the
    current time as the timestamp."""
    hmm = LinkedHashMap()
    hmm.put("symbol", 'AAPL')
    # 'gen' is a module-level random generator with nextInt() -- TODO confirm
    p = Double(gen.nextInt(18))
    hmm.put("price", p)
    hmm.put("ts", Date(System.currentTimeMillis()))
    return hmm
    def buildImagesCSVTable(fileName, logger):
        """Parse a ';'-separated CSV file into a LinkedHashMap keyed by the
        7th column (the file name per the CSV layout), skipping the header
        row and normalising quote/backslash characters in every field.

        Fix: the reader is now closed in a finally block (it previously
        leaked).  NOTE(review): 'logger' is unused -- kept for interface
        compatibility.
        """
        # Initialize the table
        csvTable = LinkedHashMap()

        # Header
        isHeader = True

        # Read the CSV file; always close it, even on error.
        br = BufferedReader(FileReader(fileName))
        try:
            # Read the first line from the text file
            line = br.readLine()

            # loop until all lines are read
            while line is not None:

                if isHeader:
                    # We are past the header
                    isHeader = False
                    line = br.readLine()
                    continue

                # Get all values for current row
                row = line.split(";")

                # Remove '"' and '\' characters if needed
                for i in range(len(row)):
                    row[i] = row[i].replace("\"", "")
                    row[i] = row[i].replace("\\\\", "\\")
                    row[i] = row[i].replace("\\", "/")

                # Add the row with the file name as key
                csvTable.put(row[6], row)

                # Read next line
                line = br.readLine()
        finally:
            br.close()

        return csvTable
    def buildImagesCSVTable(fileName, logger):
        """Parse a ';'-separated CSV file into a LinkedHashMap keyed by the
        7th column (the file name per the CSV layout), skipping the header
        row and normalising quote/backslash characters in every field.

        NOTE(review): 'logger' is unused, and the reader is never closed.
        """

        # Initialize the table
        csvTable = LinkedHashMap()

        # Header
        isHeader = True

        # Read the CSV file
        br = BufferedReader(FileReader(fileName))

        # Read the first line from the text file
        line = br.readLine()

        # loop until all lines are read
        while line is not None:

            if isHeader:

                # We are past the header
                isHeader = False

                # Read next line
                line = br.readLine()

                continue

            # Get all values for current row
            row = line.split(";")

            # Remove '"' and '\' characters if needed
            for i in range(len(row)):
                row[i] = row[i].replace("\"", "")
                row[i] = row[i].replace("\\\\", "\\")
                row[i] = row[i].replace("\\", "/")

            # Add the row with the file name as key
            csvTable.put(row[6], row)

            # Read next line
            line = br.readLine()

        return csvTable
예제 #23
0
class WorkflowsData:
    """Exposes the uploader workflows the current user may access, based
    on each workflow's security roles."""

    def __activate__(self, context):
        # A workflow qualifies when it defines an upload template and
        # shares at least one security role with the current user.
        self.roles = context["page"].authentication.get_roles_list()
        self.config = context["systemConfig"]
        workflows = JsonSimple.toJavaMap(self.config.getObject(["uploader"]))
        self.uploaders = LinkedHashMap()

        for workflow in workflows.keySet():
            if workflows.get(workflow).getString("", ["upload-template"]):
                for role in workflows.get(workflow).getArray(["security"]):
                    if str(role) in self.roles:
                        self.uploaders.put(workflow, workflows.get(workflow))

    def getUploaders(self):
        return self.uploaders

    def getStageInfo(self, workflowId):
        # Load the workflow's JSON config (system properties expanded in
        # its path) and wrap the stage list.
        uploader = self.uploaders.get(workflowId)
        config = JsonSimple(File(StrSubstitutor.replaceSystemProperties(uploader.getString("", ["json-config"]))))
        return StageInfo(config.getArray(["stages"]))
예제 #24
0
 def __activate__(self, context):
     """Build self.uploaders: the uploader workflows that define an upload
     template and whose security roles intersect the current user's
     roles."""
     self.roles = context["page"].authentication.get_roles_list()
     self.config = context["systemConfig"]
     workflows = JsonSimple.toJavaMap(self.config.getObject(["uploader"]))
     self.uploaders = LinkedHashMap()

     for workflow in workflows.keySet():
         if workflows.get(workflow).getString("", ["upload-template"]):
             for role in workflows.get(workflow).getArray(["security"]):
                 if str(role) in self.roles:
                     self.uploaders.put(workflow, workflows.get(workflow))
예제 #25
0
def netcdfParse(netcdfDirs):
    """Build an insertion-ordered map of netCDF directory -> model name
    from NETCDFDIRS entries.  A (dir, model) tuple maps the pair; a bare
    string maps the directory to ''.  Trailing '/' is stripped from
    directories.  Anything else raises SyntaxError."""
    # NOTE(review): 'dict' shadows the builtin name.
    dict = LinkedHashMap()
    for entry in netcdfDirs:
        if type(entry) is types.TupleType:
            direct, modelName = check(entry, (str, str),
              "Format error in NETCDFDIRS entry", netcdfDirs)

            if direct[-1] == '/':
                direct = direct[0:-1]
            dict.put(direct, modelName)

        elif type(entry) is types.StringType:
            direct = entry
            if direct[-1] == '/':
                direct = direct[0:-1]
            dict.put(direct,  '')

        else:
            raise SyntaxError, "Invalid syntax for NETCDFDIRS" + `netcdfDirs`

    return dict
예제 #26
0
class WorkflowsData:
    """Page helper listing the uploader workflows visible to the current
    user (filtered by upload template and security roles)."""

    def __activate__(self, context):
        # Keep only workflows with an upload template and at least one
        # security role held by the current user.
        self.roles = context["page"].authentication.get_roles_list()
        self.config = context["systemConfig"]
        workflows = JsonSimple.toJavaMap(self.config.getObject(["uploader"]))
        self.uploaders = LinkedHashMap()

        for workflow in workflows.keySet():
            if workflows.get(workflow).getString("", ["upload-template"]):
                for role in workflows.get(workflow).getArray(["security"]):
                    if str(role) in self.roles:
                        self.uploaders.put(workflow, workflows.get(workflow))

    def getUploaders(self):
        return self.uploaders

    def getStageInfo(self, workflowId):
        # Resolve the workflow's JSON config file (expanding system
        # properties in its path) and wrap its stage list.
        uploader = self.uploaders.get(workflowId)
        config = JsonSimple(File(StrSubstitutor.replaceSystemProperties(uploader.getString("", ["json-config"]))))
        return StageInfo(config.getArray(["stages"]))
            
예제 #27
0
def pRarams(context, flute, request):
    """Echo the request path with its query parameters re-appended.

    Responds 200 with the reconstructed URL when query parameters are
    present, otherwise 422 with an explanatory message.

    NOTE(review): the 'response' map is populated but never used in the
    returned ServerResponse -- confirm whether it is dead code.
    """
    response = LinkedHashMap()
    text = request.path()
    status = None

    if request.queryParams().size() != 0:
        text = text + "?"

        params = request.queryParams().toSingleValueMap()
        theFirst = True

        # Join parameters as key=value pairs separated by '&'.
        for key in params.keySet():
            if theFirst != True:
                text = text + "&"
            text = text + key + "=" + params.get(key)
            theFirst = False

        status = 200
    else:
        text = "there are no params received"
        status = 422

    response.put('text', text)
    response.put('status', status)

    return ServerResponse.status(HttpStatus.valueOf(status))\
        .body(Mono.just(text), String)
예제 #28
0
def d2dParse(d2dmodels):
    """Convert D2DMODELS entries into an insertion-ordered map of D2D
    model name -> GFE model name; bare strings map to themselves and any
    other entry type raises SyntaxError."""
    # NOTE(review): 'dict' shadows the builtin name.
    dict = LinkedHashMap()
    for entry in d2dmodels:
        if type(entry) is types.TupleType:
            d2dModelName, gfeModelName = check(
                entry, (str, str), "Format error in D2DMODELS entry",
                d2dmodels)

            dict.put(d2dModelName, gfeModelName)

        elif type(entry) is types.StringType:
            d2dModelName = entry
            dict.put(d2dModelName, d2dModelName)

        else:
            raise SyntaxError, "Invalid syntax for D2DMODELS" + ` d2dmodels `

    return dict
예제 #29
0
def netcdfParse(netcdfDirs):
    """Convert NETCDFDIRS entries into an insertion-ordered map of
    directory -> model name (bare strings map to ''); trailing '/' is
    stripped.  Any other entry type raises SyntaxError."""
    # NOTE(review): 'dict' shadows the builtin name.
    dict = LinkedHashMap()
    for entry in netcdfDirs:
        if type(entry) is types.TupleType:
            direct, modelName = check(entry, (str, str),
                                      "Format error in NETCDFDIRS entry",
                                      netcdfDirs)

            if direct[-1] == '/':
                direct = direct[0:-1]
            dict.put(direct, modelName)

        elif type(entry) is types.StringType:
            direct = entry
            if direct[-1] == '/':
                direct = direct[0:-1]
            dict.put(direct, '')

        else:
            raise SyntaxError, "Invalid syntax for NETCDFDIRS" + ` netcdfDirs `

    return dict
 def getSuggestedNames(self):
     """Search the index for author records matching common forms of this
     package's author name, excluding already-linked citations.

     Returns a LinkedHashMap of author name -> ArrayList of docs.  Docs
     with a 100% rank on an unmodified record are collected separately
     and auto-saved via __autoSaveExactRecord.  Also records the maximum
     score in self.__maxScore (floored at 1.0) and marks linked entries
     via __isLinked.
     """
     # search common forms
     lookupNames = []
     surname = self.__metadata.getList("surname").get(0)
     firstName = self.__metadata.getList("firstName").get(0)
     firstInitial = firstName[0].upper()
     secondName = self.__metadata.getList("secondName")
     if not secondName.isEmpty():
         secondName = secondName.get(0)
     if secondName and secondName != "":
         secondInitial = secondName[0].upper()
         lookupNames.append("%s, %s. %s." % (surname, firstInitial, secondInitial))
         lookupNames.append("%s, %s %s." % (surname, firstName, secondInitial))
         lookupNames.append("%s, %s %s" % (surname, firstName, secondName))
         lookupNames.append("%s %s %s" % (firstName, secondName, surname))
     lookupNames.append("%s, %s." % (surname, firstInitial))
     lookupNames.append("%s, %s" % (surname, firstName))
     lookupNames.append("%s %s" % (firstName, surname))
     # Joined fragments form: dc_title:"A" OR dc_title:"B" OR ...
     query = '" OR dc_title:"'.join(lookupNames)

     # general word search from each part of the name
     parts = [p for p in self.getPackageTitle().split(" ") if len(p) > 0]
     query2 = " OR dc_title:".join(parts)

     #filter out the linked citation
     linkedCitations = self.__manifest.getList("//children//id")
     query3 = ""
     if linkedCitations:
         query3 = " OR ".join(linkedCitations)
         query3 = " AND -id:(%s)" % query3

     # Exact-form matches are boosted (^2.5) over word matches (^0.5).
     req = SearchRequest('(dc_title:"%s")^2.5 OR (dc_title:%s)^0.5%s' % (query, query2, query3))
     self.log.info("suggestedNames query={}", req.query)
     req.setParam("fq", 'recordtype:"author"')
     req.addParam("fq", 'item_type:"object"')
     req.setParam("rows", "9999")
     req.setParam("fl", "score")
     req.setParam("sort", "score desc")

     # Make sure 'fq' has already been set in the session
     ##security_roles = self.authentication.get_roles_list();
     ##security_query = 'security_filter:("' + '" OR "'.join(security_roles) + '")'
     ##req.addParam("fq", security_query)

     out = ByteArrayOutputStream()
     self.__indexer.search(req, out)
     result = JsonConfigHelper(ByteArrayInputStream(out.toByteArray()))

     #self.log.info("result={}", result.toString())
     docs = result.getJsonList("response/docs")

     # Exact (rank==100) matches are kept apart from ordinary suggestions.
     exactMatchRecords = LinkedHashMap()
     map = LinkedHashMap()

     idList = []

     for doc in docs:
         authorName = doc.getList("dc_title").get(0)
         rank = self.getRank(doc.getList("score").get(0))
         id = doc.get("id")
         idList.append(id)
         #try to do automatch
         if float(rank) == 100.00 and self.isModified() == "false":
             if exactMatchRecords.containsKey(authorName):
                 authorMatchDocs = exactMatchRecords.get(authorName)
             else:
                 authorMatchDocs = ArrayList()
                 exactMatchRecords.put(authorName, authorMatchDocs)
             authorMatchDocs.add(doc)
         elif id not in linkedCitations:
             if map.containsKey(authorName):
                 authorDocs = map.get(authorName)
             else:
                 authorDocs = ArrayList()
                 map.put(authorName, authorDocs)
             authorDocs.add(doc)

     self.__maxScore = max(1.0, float(result.get("response/maxScore")))

     if idList:
         self.__isLinked(idList, map)

     # Do not auto save if record is live
     if self.__workflowMetadata.get("modified") == "false":
         self.__autoSaveExactRecord(exactMatchRecords)

     return map
예제 #31
0
    def getSuggestedNames(self):
        """Search the index for author records matching common forms of
        this package's author name and return them grouped by author name
        (LinkedHashMap of name -> ArrayList of docs).  Records the maximum
        score in self.__maxScore (floored at 1.0)."""
        # search common forms
        lookupNames = []
        surname = self.__metadata.getList("surname").get(0)
        firstName = self.__metadata.getList("firstName").get(0)
        firstInitial = firstName[0].upper()
        secondName = self.__metadata.getList("secondName")
        if not secondName.isEmpty():
            secondName = secondName.get(0)
        if secondName and secondName != "":
            secondInitial = secondName[0].upper()
            lookupNames.append("%s, %s. %s." %
                               (surname, firstInitial, secondInitial))
            lookupNames.append("%s, %s %s." %
                               (surname, firstName, secondInitial))
            lookupNames.append("%s, %s %s" % (surname, firstName, secondName))
            lookupNames.append("%s %s %s" % (firstName, secondName, surname))
        lookupNames.append("%s, %s." % (surname, firstInitial))
        lookupNames.append("%s, %s" % (surname, firstName))
        lookupNames.append("%s %s" % (firstName, surname))
        # Joined fragments form: dc_title:"A" OR dc_title:"B" OR ...
        query = '" OR dc_title:"'.join(lookupNames)

        # general word search from each part of the name
        parts = [p for p in self.getPackageTitle().split(" ") if len(p) > 0]
        query2 = " OR dc_title:".join(parts)

        # Exact-form matches are boosted (^2.5) over word matches (^0.5).
        req = SearchRequest('(dc_title:"%s")^2.5 OR (dc_title:%s)^0.5' %
                            (query, query2))
        self.log.info("suggestedNames query={}", req.query)
        req.setParam("fq", 'recordtype:"author"')
        req.addParam("fq", 'item_type:"object"')
        req.setParam("rows", "9999")
        req.setParam("fl", "score")
        req.setParam("sort", "score desc")

        # Make sure 'fq' has already been set in the session
        ##security_roles = self.authentication.get_roles_list();
        ##security_query = 'security_filter:("' + '" OR "'.join(security_roles) + '")'
        ##req.addParam("fq", security_query)

        out = ByteArrayOutputStream()
        indexer = self.services.getIndexer()
        indexer.search(req, out)
        result = JsonConfigHelper(ByteArrayInputStream(out.toByteArray()))

        #self.log.info("result={}", result.toString())
        docs = result.getJsonList("response/docs")

        # Group result docs by author name, preserving result order.
        map = LinkedHashMap()
        for doc in docs:
            authorName = doc.getList("dc_title").get(0)
            if map.containsKey(authorName):
                authorDocs = map.get(authorName)
            else:
                authorDocs = ArrayList()
                map.put(authorName, authorDocs)
            authorDocs.add(doc)

        self.__maxScore = max(1.0, float(result.get("response/maxScore")))

        return map
예제 #32
0
    URLopener().retrieve(dependency_url, dependency_file)
    if not os.path.lexists(dependency_file):
        raise OSError('failed to download/save dependency')

# Make the downloaded dependency importable (Jython accepts jar paths here).
sys.path.append(dependency_file)

# Next our imports.
from groovy.text import StreamingTemplateEngine
from burp import IMessageEditorTab, IMessageEditorTabFactory, IContextMenuFactory
import java.lang.Exception
from javax.swing import JMenuItem
from java.awt.event import ActionListener
from java.util import LinkedHashMap

# Global template context shared across tabs (insertion-ordered java map).
context = LinkedHashMap()


class TemplateTab(IMessageEditorTab):
    def __init__(self, controller, editable):
        """Create the tab with a Burp text editor component.

        NOTE(review): 'controller' is not stored -- confirm it is unused.
        """
        self._editable = editable
        self._component = burp.createTextEditor()
        self._component.setEditable(editable)
        self._template = None
        self._orig_content = None
        self._engine = StreamingTemplateEngine()
        self._helpers = burp.getHelpers()

    def getMessage(self):
        self._template = self._helpers.bytesToString(self._component.getText())
        try:
예제 #33
0
class Analyzer(object):
    """Singleton driver for a whole static-analysis run (pysonar2 port).

    All shared analysis state lives in *class* attributes mutated in
    place, so every instance observes the same data; combined with the
    singleton ``__new__`` below the class acts as one global object.
    """
    #  global static instance of the analyzer itself
    #self = Analyzer()

    allBindings = ArrayList()      # every Binding registered during the run
    references = LinkedHashMap()   # Ref -> bindings, insertion-ordered
    semanticErrors = HashMap()     # file -> list of semantic Diagnostics
    parseErrors = HashMap()        # file -> list of parse Diagnostics
    cwd = None                     # current working dir used for imports
    nCalled = 0                    # number of functions invoked so far
    multilineFunType = False
    path = ArrayList()             # module search path
    uncalled = HashSet()           # functions never called; applied in finish()
    callStack = HashSet()          # guard against runaway call recursion
    importStack = HashSet()        # guard against circular imports
    astCache = AstCache()
    cacheDir = str()
    failedToParse = HashSet()
    stats = Stats()
    builtins = None  # Builtins()
    logger = logging.getLogger(__name__)
    loadingProgress = None
    projectDir = str()

    # below doesn't work for some reason....
    """ 
    def init_vars(self):
        self.allBindings = ArrayList()
        self.references = LinkedHashMap()
        self.semanticErrors = HashMap()
        self.parseErrors = HashMap()
        self.cwd = None
        self.nCalled = 0
        self.multilineFunType = False
        self.path = ArrayList()
        self.uncalled = HashSet()
        self.callStack = HashSet()
        self.importStack = HashSet()
        self.astCache = AstCache()
        self.cacheDir = str()
        self.failedToParse = HashSet()
        self.stats = Stats()
        self.builtins = None # Builtins()
        self.logger = logging.getLogger(__name__)
        self.loadingProgress = None
        self.projectDir = str()   
        """

    # singleton pattern
    _instance = None

    def __new__(cls, *args, **kwargs):
        """Return the single shared Analyzer instance (singleton pattern).

        NOTE(review): forwards *args/**kwargs to ``object.__new__``,
        which ignores (and on newer Pythons rejects) extra arguments --
        confirm against the target Jython version.
        """
        if not cls._instance:
            cls._instance = super(Analyzer, cls).__new__(cls, *args, **kwargs)
        return cls._instance

    def __init__(self):
        """Initialise per-run state and bootstrap builtins and the AST cache.

        NOTE(review): runs on every ``Analyzer()`` call -- ``__new__``
        returns the shared instance but ``__init__`` is still re-executed.
        The call order (builtins before cache dir/AST cache) matters.
        """
        self.moduleTable = Scope(None, Scope.ScopeType.GLOBAL)
        self.loadedFiles = ArrayList()
        self.globaltable = Scope(None, Scope.ScopeType.GLOBAL)

        import time
        # wall-clock start time in milliseconds, read back in getAnalysisSummary
        millis = int(round(time.time() * 1000))
        self.stats.putInt("startTime", millis)
        self.logger = logging.getLogger(__name__)

        # Expose the instance as Analyzer.self, mirroring the Java
        # original's static field; other methods call Analyzer.self directly.
        if not hasattr(Analyzer, 'self'):
            setattr(Analyzer, 'self', self)

        self.builtins = Builtins()
        self.builtins.init()
        #self.addPythonPath()
        self.createCacheDir()
        self.getAstCache()

    #  main entry to the analyzer
    def analyze(self, path):
        """Analyze every Python source file under the project at *path*."""
        root = _.unifyPath(path)
        self.projectDir = root
        self.loadFileRecursive(root)

    def setCWD(self, cd):
        """Record *cd* as the current working directory; None is ignored."""
        if cd is None:
            return
        self.cwd = cd

    def addPaths(self, p):
        """Add every entry of the iterable *p* to the module search path.

        Bug fix: the original called the bare name ``addPath(s)``, which
        raises NameError at runtime -- the method lives on the instance.
        """
        for s in p:
            self.addPath(s)

    def addPath(self, p):
        """Append the unified (normalised) form of *p* to the search path."""
        self.path.add(_.unifyPath(p))

    def setPath(self, path):
        """Replace the module search path with the entries of *path*."""
        # ArrayList(int) only pre-sizes capacity; the list starts empty.
        self.path = ArrayList(len(path))
        self.addPaths(path)

    def addPythonPath(self):
        """Add each entry of the PYTHONPATH environment variable to the path.

        Bug fix: the original iterated an undefined name ``segments``
        (NameError); the PYTHONPATH string must be split on the platform
        path separator first.
        """
        path = System.getenv("PYTHONPATH")
        if path is not None:
            for p in path.split(os.pathsep):
                self.addPath(p)

    def getLoadPath(self):
        """Return cwd + project dir + configured path as one list.

        NOTE(review): mixes Python list operations (append, +=) with a
        Java ArrayList -- relies on Jython's Java-collection integration;
        confirm on the target Jython version.
        """
        loadPath = ArrayList()
        if self.cwd is not None:
            loadPath.append(self.cwd)
        if self.projectDir is not None and os.path.isdir(self.projectDir):
            loadPath.append(self.projectDir)
        loadPath += self.path
        return loadPath

    def inStack(self, f):
        """True when *f* is on the call-recursion guard set."""
        return f in self.callStack

    def pushStack(self, f):
        """Mark *f* as currently being applied."""
        self.callStack.add(f)

    def popStack(self, f):
        """Unmark *f* after application finishes."""
        self.callStack.remove(f)

    def inImportStack(self, f):
        """True when *f* is mid-import (circular-import guard)."""
        return f in self.importStack

    def pushImportStack(self, f):
        """Mark *f* as currently being imported."""
        self.importStack.add(f)

    def popImportStack(self, f):
        """Unmark *f* once its import completes."""
        self.importStack.remove(f)

    def getAllBindings(self):
        """Return the shared list of all registered bindings."""
        return self.allBindings

    def getCachedModule(self, file_):
        """Return the cached module type for *file_*, or None if absent.

        When the cached type is a union, the first module member wins.
        """
        t = self.moduleTable.lookupType(_.moduleQname(file_))
        if t is None:
            return None
        elif t.isUnionType():
            for tt in t.asUnionType().getTypes():
                if tt.isModuleType():
                    return tt
            return None
        elif t.isModuleType():
            return t
        else:
            return None

    def getDiagnosticsForFile(self, file_):
        """Return semantic diagnostics for *file_*, or an empty list."""
        errs = self.semanticErrors.get(file_)
        if errs is not None:
            return errs
        return ArrayList()

    #@overloaded
    def putRef(self, node, bs):
        """Record that *node* references binding(s) *bs*.

        Accepts a single binding or a sequence; Url nodes are ignored.
        Each binding is cross-linked back to the reference.
        """
        if not hasattr(bs, '__len__'):
            bs = [bs]

        if not (isinstance(node, (Url, ))):
            ref = Ref(node)
            # NOTE(review): dict-style [] access on a Java LinkedHashMap
            # relies on Jython's Map integration -- confirm Jython version.
            bindings = self.references.get(ref)
            if bindings is None:
                bindings = ArrayList()
                self.references[ref] = bindings
            for b in bs:
                if not b in bindings:
                    bindings.append(b)
                b.addRef(ref)

    def getReferences(self):
        """Return the Ref -> bindings map for the whole run."""
        return self.references

    def putProblem(self, *args):
        """Dispatch by arity: (loc, msg) or (file, begin, end, msg)."""
        if len(args) == 2:
            return self.putProblem0(*args)
        else:
            return self.putProblem1(*args)

    #@overloaded
    def putProblem0(self, loc, msg):
        """Record semantic problem *msg* at node/location *loc*."""
        file_ = loc.getFile()
        if file_ is not None:
            self.addFileErr(file_, loc.start, loc.end, msg)

    #  for situations without a Node
    #@putProblem.register(object, str, int, int, str)
    def putProblem1(self, file_, begin, end, msg):
        """Record semantic problem *msg* for an explicit file/offset range."""
        if file_ is not None:
            self.addFileErr(file_, begin, end, msg)

    def addFileErr(self, file_, begin, end, msg):
        """Append an ERROR diagnostic for *file_* to the semantic error map."""
        d = Diagnostic(file_, Diagnostic.Category.ERROR, begin, end, msg)
        self.getFileErrs(file_, self.semanticErrors).append(d)

    def getParseErrs(self, file_):
        """Return (creating if needed) the parse-error list for *file_*."""
        return self.getFileErrs(file_, self.parseErrors)

    def getFileErrs(self, file_, _map):
        """Return (creating if needed) the error list for *file_* in *_map*."""
        msgs = _map.get(file_)
        if msgs is None:
            msgs = ArrayList()
            _map[file_] = msgs
        return msgs

    def loadFile(self, path):
        """Parse and resolve one source file; return its module type.

        Returns None when the file is unreadable, is already mid-import
        (circular import), or fails to parse.  Cached modules are reused.
        NOTE(review): pushImportStack(path) has no matching pop in this
        method -- confirm whether the caller pops, or imports stay marked.
        """
        _.msg("loading: " + path)
        path = _.unifyPath(path)
        if not os.path.isfile(path):
            self.finer("\nfile not not found or cannot be read: " + path)
            return None

        module_ = self.getCachedModule(path)
        if module_ is not None:
            self.finer("\nusing cached module " + path + " [succeeded]")
            return module_

        #  detect circular import
        if Analyzer.self.inImportStack(path):
            return None

        #  set new CWD and save the old one on stack
        oldcwd = self.cwd

        # NOTE(review): os.path.join(*path.split(os.sep)[:-1]) drops the
        # leading separator of an absolute path -- confirm intended.
        self.setCWD(os.path.join(*path.split(os.sep)[:-1]))
        Analyzer.self.pushImportStack(path)
        mod = self.parseAndResolve(path)

        #  restore old CWD
        self.setCWD(oldcwd)
        return mod

    def isInLoadPath(self, dir):
        """Return True when directory *dir* is one of the load-path roots.

        Bug fix: the original called the bare name ``getLoadPath()``,
        which raises NameError -- the method lives on the instance.
        """
        for s in self.getLoadPath():
            if File(s) == dir:
                return True
        return False

    def parseAndResolve(self, file_):
        """Parse *file_* into an AST and resolve it against the module table.

        Returns the resulting ModuleType, or None on parse failure or
        memory exhaustion (the AST cache is dropped and gc is forced).
        """
        self.finer("Analyzing: " + file_)
        self.loadingProgress.tick()
        try:
            ast = self.getAstForFile(file_)
            if ast is None:
                self.failedToParse.add(file_)
                return None
            else:
                self.finer("resolving: " + file_)
                mod = ast.resolve(self.moduleTable)
                assert isinstance(mod, ModuleType)
                self.finer("[success]")
                self.loadedFiles.append(file_)
                return mod
        except MemoryError as e:
            # Free the AST cache and collect before giving up on this file.
            if self.astCache is not None:
                self.astCache.clear()
            import gc
            gc.collect()
            return None

    def createCacheDir(self):
        """Create the on-disk AST cache directory if it does not exist.

        Fixes: removed the dead local alias ``f`` and repaired the
        malformed die message (" .Please" -> ". Please").
        """
        self.cacheDir = _.makePathString(_.getSystemTempDir(), "pysonar2",
                                         "ast_cache")
        _.msg("AST cache is at: " + self.cacheDir)
        if not os.path.exists(self.cacheDir):
            os.makedirs(self.cacheDir)
            # Double-check: makedirs can be raced by another process.
            if not os.path.exists(self.cacheDir):
                _.die("Failed to create tmp directory: " + self.cacheDir +
                      ". Please check permissions")

    def getAstCache(self):
        """Return the AST cache instance, creating it lazily if unset.

        NOTE(review): the class attribute initialises astCache to
        AstCache(), so the None branch only fires if it was cleared;
        returning ``self.astCache.INSTANCE`` assumes AstCache exposes a
        shared INSTANCE attribute -- confirm against the AstCache class.
        """
        if self.astCache is None:
            self.astCache = AstCache.get()
        return self.astCache.INSTANCE

    #
    #      * Returns the syntax tree for {@code file}. <p>
    #
    def getAstForFile(self, file_):
        """Return the (possibly cached) AST for *file_*."""
        return self.getAstCache().getAST(file_)

    def getBuiltinModule(self, qname):
        """Return the builtin module registered under *qname*, if any."""
        return self.builtins.get(qname)

    def makeQname(self, names):
        """Join the ``.id`` of each name node with dots into a qname.

        Simplified from a manual index loop to str.join; behaviour is
        unchanged, including the "" result for an empty sequence.
        """
        if _.isEmpty(names):
            return ""
        return ".".join([n.id for n in names])

    #
    #      * Find the path that contains modname. Used to find the starting point of locating a qname.
    #      *
    #      * @param headName first module name segment
    #
    def locateModule(self, headName):
        """Return the load-path root containing module *headName*, or None.

        A root matches when it holds either a package directory
        (headName/__init__.py) or a plain module file (headName.py).
        """
        loadPath = self.getLoadPath()

        for p in loadPath:
            startDir = os.sep.join([p, headName])
            initFile = _.joinPath(startDir, "__init__.py")

            if os.path.exists(initFile):
                return p

            startFile = startDir + ".py"
            if os.path.exists(startFile):
                return p

        return None

    def loadModule(self, name, scope):
        """Load the (possibly dotted) module *name* and bind it in *scope*.

        *name* is a list of name nodes.  Builtin modules are bound and
        returned directly; otherwise each package segment is loaded in
        turn, binding every intermediate package into its parent's table
        (or *scope* for the first segment).  Returns the last module
        loaded, or None on any failure.
        """
        if _.isEmpty(name):
            return None

        from Binding import Binding

        qname = self.makeQname(name)
        mt = self.getBuiltinModule(qname)
        if mt is not None:
            scope.insert(
                name[0].id,
                Url(Builtins.LIBRARY_URL + mt.getTable().getPath() + ".html"),
                mt, Binding.Kind.SCOPE)
            return mt

        #  If there's more than one segment
        #  load the packages first
        prev = None
        startPath = self.locateModule(name[0].id)
        if startPath is None:
            return None

        path = startPath
        for i, n in enumerate(name):
            path = os.sep.join([path, name[i].id])
            initFile = _.joinPath(path, "__init__.py")

            if os.path.isfile(initFile):
                # Package segment: load its __init__.py and bind it.
                mod = self.loadFile(initFile)
                if mod is None:
                    return None
                if prev is not None:
                    prev.getTable().insert(name[i].id, name[i], mod,
                                           Binding.Kind.VARIABLE)
                else:
                    scope.insert(name[i].id, name[i], mod,
                                 Binding.Kind.VARIABLE)
                prev = mod

            elif i == len(name) - 1:
                # Final segment may be a plain module file instead.
                startFile = path + ".py"
                if os.path.isfile(startFile):
                    mod = self.loadFile(startFile)
                    if mod is None:
                        return None
                    if prev is not None:
                        prev.getTable().insert(name[i].id, name[i], mod,
                                               Binding.Kind.VARIABLE)
                    else:
                        scope.insert(name[i].id, name[i], mod,
                                     Binding.Kind.VARIABLE)
                    prev = mod
                else:
                    return None

        return prev

    #
    #      * Load all Python source files recursively if the given fullname is a
    #      * directory; otherwise just load a file.  Looks at file extension to
    #      * determine whether to load a given file.
    #
    def loadFileRecursive(self, fullname):
        """Load *fullname* (a .py file) or every .py file beneath it.

        NOTE(review): os.walk already descends into subdirectories, yet
        the loop also recurses into each dir, so files are visited many
        times (harmless only because loadFile caches modules).  Confirm
        before changing; countFileRecursive repeats the same pattern so
        the progress total stays consistent with the visit count.
        """
        count = self.countFileRecursive(fullname)
        if self.loadingProgress is None:
            self.loadingProgress = FancyProgress(count, 50)
        if os.path.isdir(fullname):
            for root, dirs, files in os.walk(fullname):
                for f in files:
                    self.loadFileRecursive(root + os.sep + f)
                for d in dirs:
                    self.loadFileRecursive(root + os.sep + d)
        else:
            if fullname.endswith(".py"):
                self.loadFile(fullname)

    #  count number of .py files
    def countFileRecursive(self, fullname):
        """Count .py files under *fullname*.

        Mirrors loadFileRecursive's duplicated traversal on purpose so
        the progress total matches the number of load visits.
        """
        sum = 0
        if os.path.isdir(fullname):
            for root, dirs, files in os.walk(fullname):
                for f in files:
                    sum += self.countFileRecursive(root + os.sep + f)
                for d in dirs:
                    sum += self.countFileRecursive(root + os.sep + d)
        else:
            if fullname.endswith(".py"):
                sum += 1
        return sum

    def finish(self):
        """Post-analysis pass: apply uncalled functions, flag unused
        variables, convert constructor call-sites, print the summary."""
        #         progress.end();
        _.msg("\nFinished loading files. " + str(self.nCalled) +
              " functions were called.")
        _.msg("Analyzing uncalled functions")
        self.applyUncalled()
        #  mark unused variables
        for b in self.allBindings:
            if not b.getType().isClassType() and not b.getType().isFuncType(
            ) and not b.getType().isModuleType() and _.isEmpty(b.getRefs()):
                # NOTE(review): reports the binding's *class* name, not the
                # variable's name -- looks like a porting slip; confirm.
                Analyzer.self.putProblem(
                    b.getNode(), "Unused variable: " + b.__class__.__name__)
        for ent in self.references.items():
            self.convertCallToNew(ent[0], ent[1])
        _.msg(self.getAnalysisSummary())

    def close(self):
        """Release the AST cache's resources."""
        self.astCache.close()

    def convertCallToNew(self, ref, bindings):
        """Mark call-site *ref* as a constructor call when its first
        binding resolves to a non-function, non-unknown type."""
        if ref.isRef():
            return
        if len(bindings) == 0:
            return
        nb = bindings[0]
        t = nb.getType()
        if t.isUnionType():
            t = t.asUnionType().firstUseful()
            if t is None:
                return
        if not t.isUnknownType() and not t.isFuncType():
            ref.markAsNew()

    def addUncalled(self, cl):
        """Track closure *cl* for forced application if it is never called."""
        if not cl.func.called:
            self.uncalled.add(cl)

    def removeUncalled(self, f):
        """Drop *f* from the uncalled set, if present."""
        if f in self.uncalled: self.uncalled.remove(f)

    def applyUncalled(self):
        """Force-apply every still-uncalled function until none remain.

        NOTE(review): loops while the set is non-empty and relies on
        Call.apply (indirectly) removing entries -- otherwise this never
        terminates; confirm against Call.apply.
        """
        progress = FancyProgress(len(self.uncalled), 50)
        while not _.isEmpty(self.uncalled):
            # snapshot: the set is mutated while its members are applied
            uncalledDup = list(self.uncalled)
            for cl in uncalledDup:
                progress.tick()
                Call.apply(cl, None, None, None, None, None)

    def getAnalysisSummary(self):
        """Build the human-readable end-of-run summary text."""
        sb = []
        sb.append("\n" + _.banner("analysis summary"))
        duration = _.formatTime(_.millis() - self.stats.getInt("startTime"))
        sb.append("\n- total time: " + duration)
        sb.append("\n- modules loaded: " + str(len(self.loadedFiles)))
        sb.append("\n- semantic problems: " + str(len(self.semanticErrors)))
        sb.append("\n- failed to parse: " + str(len(self.failedToParse)))
        #  calculate number of defs, refs, xrefs
        nDef = 0
        nXRef = 0
        for b in self.getAllBindings():
            nDef += 1
            nXRef += len(b.getRefs())
        sb.append("\n- number of definitions: " + str(nDef))
        sb.append("\n- number of cross references: " + str(nXRef))
        sb.append("\n- number of references: " +
                  str(len(self.getReferences())))
        resolved = self.stats.getInt("resolved")
        unresolved = self.stats.getInt("unresolved")
        sb.append("\n- resolved names: " + str(resolved))
        sb.append("\n- unresolved names: " + str(unresolved))
        sb.append("\n- name resolve rate: " +
                  _.percent(resolved, resolved + unresolved))
        sb.append("\n" + _.getGCStats())
        return ''.join(sb)

    def getLoadedFiles(self):
        """Return a new ArrayList holding only the loaded .py files."""
        pyFiles = ArrayList()
        for name in self.loadedFiles:
            if not name.endswith(".py"):
                continue
            pyFiles.append(name)
        return pyFiles

    def registerBinding(self, b):
        """Append *b* to the global binding list."""
        self.allBindings.append(b)

    def log(self, level, msg):
        # level is currently ignored; everything funnels through _.msg.
        _.msg(msg)

    def severe(self, msg):
        """Log *msg* at SEVERE level."""
        self.log(Level.SEVERE, msg)

    def warn(self, msg):
        """Log *msg* at WARNING level."""
        self.log(Level.WARNING, msg)

    def info(self, msg):
        """Log *msg* at INFO level."""
        self.log(Level.INFO, msg)

    def fine(self, msg):
        """Log *msg* at FINE level."""
        self.log(Level.FINE, msg)

    def finer(self, msg):
        """Log *msg* at FINER level.

        Consistency fix: the sibling wrappers (severe/warn/info/fine)
        pass a java.util.logging Level constant; the placeholder string
        '*a log level*' is replaced with Level.FINER.  log() currently
        ignores the level, so observable output is unchanged.
        """
        self.log(Level.FINER, msg)

    def __str__(self):
        """Short diagnostic summary of analyzer state.

        Bug fix: the original concatenated str with the int results of
        len(), raising TypeError whenever __str__ was invoked; the counts
        are now converted with str().
        """
        return ("<Analyzer:locs=" + str(len(self.references)) +
                ":probs=" + str(len(self.semanticErrors)) +
                ":files=" + str(len(self.loadedFiles)) + ">")
예제 #34
0
    def buildPostDataFortoken(self, encodedJWT, softwareStatementId) :
		"""Build the x-www-form-urlencoded body for a client_credentials
		token request, authenticating with the *encodedJWT* client
		assertion.  Returns the encoded body as a string."""
		postParameters = LinkedHashMap()
		postParameters.put("scope", self.clientScopes)
		postParameters.put("client_assertion_type", "urn:ietf:params:oauth:client-assertion-type:jwt-bearer")
		postParameters.put("grant_type", "client_credentials")
		postParameters.put("client_id", softwareStatementId)
		postParameters.put("client_assertion", encodedJWT)

		# URL-encode each pair; '+' -> '%20' for stricter RFC 3986 form.
		postData = StringBuilder()
		for param in postParameters.entrySet():
			if postData.length() != 0:
				postData.append('&')
			postData.append(URLEncoder.encode(param.getKey(), "UTF-8"))
			postData.append('=')
			postData.append(URLEncoder.encode(String(param.getValue()), "UTF-8").replace("+", "%20"))
		print "Post data: "+postData.toString()
		return postData.toString()
예제 #35
0
# Simple Jython example inspired by:
# http://coffeeonesugar.wordpress.com/2009/07/21/getting-started-with-esper-in-5-minutes/
from java.util import Random, Date, LinkedHashMap
from java.lang import String, Double, System
import com.espertech.esper.client as C

# Event-type schema for "StockTick": field name -> Java type, in
# declaration order (registered with the Esper configuration below).
type_map = LinkedHashMap()
type_map.put("symbol", String)
type_map.put("price", Double)
type_map.put("ts", Date)

# Shared RNG used to generate synthetic prices.
gen = Random()

def random_tick():
    """Build one synthetic AAPL stock tick as an ordered map with the
    symbol, a random price and the current timestamp."""
    tick = LinkedHashMap()
    tick.put("symbol", 'AAPL')
    tick.put("price", Double(gen.nextInt(18)))
    tick.put("ts", Date(System.currentTimeMillis()))
    return tick

class Listener(C.UpdateListener):
    """Esper update listener that prints each incoming tick.

    NOTE(review): update() is declared without an explicit ``self``; the
    instance arrives as args[0], so args[1] is the new-events array and
    args[1][0] the first event -- confirm this is intentional.
    """
    def update(*args, **kwargs):
        a = args[1][0]
        print "Symbol: %s Price: %5.2f  Ts: %s" % (a.get("symbol"),
            a.get("price"), a.get("ts"))

def main():
    conf = C.Configuration()
    conf.addEventType("StockTick", type_map)
    cep = C.EPServiceProviderManager.getProvider("myCEPEngine", conf)
예제 #36
0
def otherParse(validSites, serverhost, mhsid, port, initmodules, accumElem,
               initskips, d2ddbver, logfilepurge, prddir, home, extraWEPrec,
               vtecRequestTime, autoConfigureNotifyTextProd,
               iscRoutingTableAddress, requestedISCsites, requestISC,
               sendiscOnSave, sendiscOnPublish, requestedISCparms,
               transmitScript):
    """Validate GFE server configuration values and convert the dict/list
    settings into the Java collections the server expects.

    Raises TypeError or ValueError on any malformed setting; returns the
    validated (and partly converted) values as a tuple in the caller's
    expected order.
    """
    if type(serverhost) != str:
        raise TypeError, "GFESUITE_HOST not an str: " + ` serverhost `
    if type(mhsid) != str:
        raise TypeError, "GFESUITE_MHSID not an str: " + ` mhsid `
    if type(vtecRequestTime) != int:
        raise TypeError, "VTECPartners: VTEC_REMOTE_TABLE_FETCH_TIME " + \
          "not an int: " + `vtecRequestTime`
    if type(port) != int:
        raise TypeError, "GFESUITE_PORT not an int: " + ` port `
    # Convert dict-style settings into LinkedHashMaps of Java values.
    initmodules = dictCheck(initmodules, list, str, "INITMODULES")
    accumElem = dictCheck(accumElem, list, str, "D2DAccumulativeElements")
    initskips = dictCheck(initskips, list, int, "INITSKIPS")
    d2ddbver = dictCheck(d2ddbver, int, None, "D2DDBVERSIONS")
    if type(logfilepurge) != int:
        raise TypeError, "LOG_FILE_PURGE_AFTER not an int: " + ` logfilepurge `
    if type(autoConfigureNotifyTextProd) != int:
        # NOTE(review): message echoes `logfilepurge` instead of
        # autoConfigureNotifyTextProd -- looks like a copy/paste slip.
        raise TypeError, "AUTO_CONFIGURE_NOTIFYTEXTPROD not an int: " + \
          `logfilepurge`
    if type(prddir) != str:
        raise TypeError, "GFESUITE_PRDDIR not an str: " + ` prddir `
    if type(home) != str:
        raise TypeError, "GFESUITE_HOME not an str: " + ` home `
    if type(extraWEPrec) != list:
        raise TypeError, "ExtraWEPrec not an list: " + ` extraWEPrec `
    else:
        # Entries are either "name" (precision 1) or ("name", precision).
        extraWEPrecision = LinkedHashMap()
        for e in extraWEPrec:
            if type(e) == str:
                extraWEPrecision.put(e, Integer(1))
            elif type(e) == tuple and len(e) == 2 and type(e[0]) == str and \
              type(e[1]) == int:
                extraWEPrecision.put(e[0], Integer(e[1]))
            else:
                raise TypeError, \
                  "Entry in ExtraWEPrec not str or (str, int): " + `e`

    iscRoutingTableAddress = dictCheck(iscRoutingTableAddress, str, str,
                                       "ISC_ROUTING_TABLE_ADDRESS")
    #if type(iscRoutingTableAddress) not in [str, types.NoneType]:
    #    raise TypeError, "ISC_ROUTING_TABLE_ADDRESS not None or a str: " + \
    #      `iscRoutingTableAddress`
    #elif iscRoutingTableAddress is None:
    #    iscRoutingTableAddress = ""

    # Requested ISC sites must be known to serverConfig.py.
    reqISCsites = ArrayList()
    if type(requestedISCsites) not in [list, types.NoneType]:
        raise TypeError, "REQUESTED_ISC_SITES not None or a list: " + \
          `requestedISCsites`
    elif type(requestedISCsites) is list:
        for r in requestedISCsites:
            if type(r) != str:
                raise TypeError, "REQUESTED_ISC_SITES not list of strings: " + \
                  `requestedISCsites`
                #Verify requested ISC site is of desired pattern
            elif r not in validSites:
                raise ValueError, "Requested ISC site: " + str(
                    r) + " could not be found in serverConfig.py."
            else:
                reqISCsites.add(r)

    # Requested ISC parms must match the configured name pattern.
    reqISCparms = ArrayList()
    if type(requestedISCparms) not in [list, types.NoneType]:
        raise TypeError, "REQUESTED_ISC_PARMS not None or a list: " + \
          `requestedISCparms`
    elif type(requestedISCparms) is list:
        for r in requestedISCparms:
            if type(r) != str:
                raise TypeError, "REQUESTED_ISC_PARMS not list of strings: " + \
                  `requestedISCparms`
                #Verify requested ISC parm is of desired pattern
            elif not re.match(configProps.ISC_PARM_PATTERN, str(r)):
                raise ValueError, "Requested ISC parm: " + str(
                    r
                ) + " does not match desired pattern: " + configProps.ISC_PARM_PATTERN
            else:
                reqISCparms.add(r)

    # The three ISC flags accept a bool, or the ints 0/1.
    if type(requestISC) != bool:
        #If the type is boolean, it is already a valid value
        #If the type is not boolean, and is not int, then it is not valid
        if type(requestISC) != int:
            raise TypeError, "REQUEST_ISC not an int or boolean: " + ` requestISC `
        #Verify request ISC is of valid value
        elif not ((requestISC == 0) or (requestISC == 1)):
            raise ValueError, "REQUEST_ISC is: " + ` requestISC ` + ", but expected True, False, 0 or 1"

    if type(sendiscOnSave) != bool:
        #If the type is boolean, it is already a valid value
        #If the type is not boolean, and is not int, then it is not valid
        if type(sendiscOnSave) != int:
            raise TypeError, "SEND_ISC_ON_SAVE not an int or boolean: " + ` sendiscOnSave `
        #Verify send ISC on save is of valid value
        elif not ((sendiscOnSave == 0) or (sendiscOnSave == 1)):
            raise ValueError, "SEND_ISC_ON_SAVE is: " + ` sendiscOnSave ` + ", but expected True, False, 0 or 1"

    if type(sendiscOnPublish) != bool:
        #If the type is boolean, it is already a valid value
        #If the type is not boolean, and is not int, then it is not valid
        if type(sendiscOnPublish) != int:
            raise TypeError, "SEND_ISC_ON_PUBLISH not an int or boolean: " + ` sendiscOnPublish `
        #Verify send ISC on publish is of valid value
        elif not ((sendiscOnPublish == 0) or (sendiscOnPublish == 1)):
            raise ValueError, "SEND_ISC_ON_PUBLISH is: " + ` sendiscOnPublish ` + ", but expected True, False, 0 or 1"

    if type(transmitScript) not in [str, types.NoneType]:
        raise TypeError, "TRANSMIT_SCRIPT not None or str: " + ` transmitScript `
    elif transmitScript is None:
        transmitScript = ""

    return serverhost, mhsid, \
      port, initmodules, accumElem, \
      initskips, d2ddbver, logfilepurge, prddir, home,\
      extraWEPrecision, vtecRequestTime, \
      autoConfigureNotifyTextProd, \
      iscRoutingTableAddress, reqISCsites, requestISC, sendiscOnSave, \
      sendiscOnPublish, reqISCparms, transmitScript
예제 #37
0
def onInit():
    """Seed the Sponge variables that the test assertions read."""
    # Variables for assertions only
    sponge.setVariable("hardwareFailureScriptCount", AtomicInteger(0))
    sponge.setVariable("hardwareFailureScriptFinishCount", AtomicInteger(0))
    # Thread-safe, insertion-ordered map of event logs.
    sponge.setVariable("eventLogs",
                       Collections.synchronizedMap(LinkedHashMap()))
예제 #38
0
#    Apr 23, 2015    4259          njensen        Updated for new JEP API
#    09/01/2015          16287     amoore         Additional validation of user input
#    05/24/2016          15633     bhunder        Modified so that a parm name could
#                                                 contain your office type.
#
########################################################################
import types, re, configProps

from java.util import ArrayList, LinkedHashMap
from java.lang import Integer, Float
from com.vividsolutions.jts.geom import Coordinate
from java.awt import Point

# Parsed-configuration accumulators, filled in by the parse routines below.
Databases = {}
Projections = {}
DiscreteDef = LinkedHashMap()  #from parseKeys()


# Check a python sequence to see that
# it matches the format.
# data: is a sequence of objects
# fmt : is a parallel sequence of type objects
# message : optional message to print on exception
#
# Returns data.
# If len(data) != len(fmt)
# or the type of each element in data does not
# match the coresponding type in fmt, then
# a TypeError is raised.
# Example:  a, b = check(([1, 3], "foo"), (list, str))
def check(data, fmt, message, allData=None):
예제 #39
0
	parser.print_usage()
	sys.exit(1)

xmlDir = args[0]
outDir = args[1]
# Target mesh size; defaults to 1.0 when not given on the command line.
if options.size:
	size = options.size
else:
	size = 1.0

mtb = MeshTraitsBuilder.getDefault3D()
mtb.addNodeList()
mesh = Mesh(mtb)
MeshReader.readObject3D(mesh, xmlDir)

# Group beam indices by group id (insertion-ordered map of TIntArrayList).
bgroupMap = LinkedHashMap()
#print "beams size: "+str(mesh.getBeams().size())
for i in xrange(mesh.getBeams().size() / 2):
	bId = mesh.getBeamGroup(i)
	listBeamId = bgroupMap.get(bId)
	if listBeamId is None:
		listBeamId = TIntArrayList(100)
		bgroupMap.put(bId, listBeamId)
	listBeamId.add(i)

# Keep the beam endpoints, then clear them from the mesh for rebuilding.
vertices = ArrayList(mesh.getBeams())
mesh.resetBeams()
mapGroupToListOfPolylines = LinkedHashMap()
for bId in bgroupMap.keySet():
	listBeamId = bgroupMap.get(bId)
	listOfPolylines = ArrayList()
예제 #40
0
파일: refine.py 프로젝트: alclp/jCAE
# Metric selection: a point-metric file (optionally blended with rho)
# takes precedence over the plain analytic remesh metric.
if options.point_metric_file:
	if options.rho > 1.0:
		algo.setAnalyticMetric(DistanceMetric(options.size, options.point_metric_file, options.rho, options.mixed))
	else:
		algo.setAnalyticMetric(DistanceMetric(options.size, options.point_metric_file))
elif setAnalytic:
	algo.setAnalyticMetric(RemeshMetric());

algo.compute();
#MeshWriter.writeObject3D(algo.getOutputMesh(), outDir, String())
if options.recordFile:
	liaison.getMesh().getTrace().finish()

# Now compute beams
# Group beam indices of the remeshed output by group id.
bgroupMap = LinkedHashMap()
newMesh = algo.getOutputMesh()
#print "beams size: "+str(mesh.getBeams().size())
for i in xrange(newMesh.getBeams().size() / 2):
	bId = newMesh.getBeamGroup(i)
	listBeamId = bgroupMap.get(bId)
	if listBeamId is None:
		listBeamId = TIntArrayList(100)
		bgroupMap.put(bId, listBeamId)
	listBeamId.add(i)

# Keep the beam endpoints, then clear them from the mesh for rebuilding.
vertices = ArrayList(newMesh.getBeams())
newMesh.resetBeams()
mapGroupToListOfPolylines = LinkedHashMap()
for bId in bgroupMap.keySet():
	listBeamId = bgroupMap.get(bId)
예제 #41
0
def _toJavaLinkedMap(val):
    '''
    Turn the Python OrderedDict *val* into a java.util.LinkedHashMap,
    preserving key order (delegates to __toJavaMapInternal).
    '''
    return __toJavaMapInternal(val, LinkedHashMap())
    def __workflow(self):
        """Read (or create) the object's workflow metadata and index it.

        Progresses the workflow step when a targetStep is pending,
        applies per-stage visibility/security, and copies form data into
        the core and custom index fields.  A missing payload
        (StorageException) triggers creation of a fresh "pending" record.
        """
        # Workflow data
        WORKFLOW_ID = "relay"
        wfChanged = False
        workflow_security = []
        self.message_list = None
        try:
            wfPayload = self.object.getPayload("workflow.metadata")
            wfMeta = self.utils.getJsonObject(wfPayload.open())
            wfPayload.close()

            # Are we indexing because of a workflow progression?
            targetStep = wfMeta.getString(None, ["targetStep"])
            if targetStep is not None and targetStep != wfMeta.getString(None, ["step"]):
                wfChanged = True
                # Step change
                wfMeta.getJsonObject().put("step", targetStep)
                wfMeta.getJsonObject().remove("targetStep")

            # This must be a re-index then
            else:
                targetStep = wfMeta.getString(None, ["step"])

            # Security change
            stages = self.config.getJsonSimpleList(["stages"])
            for stage in stages:
                if stage.getString(None, ["name"]) == targetStep:
                    wfMeta.getJsonObject().put("label", stage.getString(None, ["label"]))
                    self.item_security = stage.getStringList(["visibility"])
                    workflow_security = stage.getStringList(["security"])
                    if wfChanged == True:
                        self.message_list = stage.getStringList(["message"])

            # Form processing
            formData = wfMeta.getObject(["formData"])
            if formData is not None:
                formData = JsonSimple(formData)
            else:
                formData = None
            coreFields = ["title", "creator", "contributor", "description", "format", "creationDate"]
            if formData is not None:
                # Core fields
                title = formData.getStringList(["title"])
                if title:
                    self.titleList = title
                creator = formData.getStringList(["creator"])
                if creator:
                    self.creatorList = creator
                contributor = formData.getStringList(["contributor"])
                if contributor:
                    self.contributorList = contributor
                description = formData.getStringList(["description"])
                if description:
                    self.descriptionList = description
                format = formData.getStringList(["format"])
                if format:
                    self.formatList = format
                creation = formData.getStringList(["creationDate"])
                if creation:
                    self.creationDate = creation
                # Course security - basic
                course = formData.getStringList(["course_code"])
                if course:
                    self.item_security.add(course)
                # Course security - moodle
                moodle_courses = formData.getString(None, ["moodleSecurity"])
                if moodle_courses:
                    moodleList = moodle_courses.split(",")
                    for course in moodleList:
                        if course != "":
                            self.item_security.add(course)
                # Course facets - Peoplesoft
                psMoodle_courses = formData.getString(None, ["psMoodle"])
                if psMoodle_courses:
                    psMoodleList = psMoodle_courses.split(",")
                    for course in psMoodleList:
                        if course != "":
                            self.__indexCourse(course)

                # Non-core fields
                data = formData.getJsonObject()
                for field in data.keySet():
                    if field not in coreFields:
                        data = formData.getStringList([field])
                        # dc_subject.* fields are merged into one dc_subject list
                        if field.startswith("dc_subject."):
                            subjectField = "dc_subject"
                            if self.customFields.has_key(subjectField):
                                subjectList = self.customFields[subjectField]
                                if subjectList:
                                   for subject in subjectList:
                                       data.add(subject)
                            field = subjectField
                        self.customFields[field] = data


        except StorageException, e:
            # No workflow payload, time to create
            wfChanged = True
            wfMeta = JsonSimple()
            wfMetaObj = wfMeta.getJsonObject()
            wfMetaObj.put("id", WORKFLOW_ID)
            wfMetaObj.put("step", "pending")
            wfMetaObj.put("pageTitle", "Camtasia Relay Files - Management")

            # Seed formData from the Relay title/description when known.
            metaMap = LinkedHashMap();
            if self.relayTitle is not None:
                metaMap.put("title", self.relayTitle);
            if self.relayDescription is not None:
                metaMap.put("description", self.relayDescription);

            if not metaMap.isEmpty():
                wfMetaObj.put("formData", metaMap);

            stages = self.config.getJsonSimpleList(["stages"])
            for stage in stages:
                if stage.getString(None, ["name"]) == "pending":
                    wfMetaObj.put("label", stage.getString(None, ["label"]))
                    self.item_security = stage.getStringList(["visibility"])
                    workflow_security = stage.getStringList(["security"])
                    self.message_list = stage.getStringList(["message"])
예제 #43
0
opts.clear()
opts.put("coplanarity", "0.9")
opts.put("size", str(options.size * 0.2))
opts.put("maxlength", str(options.size * 1.2))
QEMDecimateHalfEdge(liaison, opts).compute()

opts.clear()
opts.put("coplanarity", "0.75")
opts.put("tolerance", "0.6")
opts.put("iterations", str(8))
SmoothNodes3DBg(liaison, opts).compute()

#MeshWriter.writeObject3D(liaison.mesh, outDir, "")

# Now remesh beams
# Group beam indices by their beam-group id: bgroupMap maps each group id
# to a TIntArrayList of the beam indices belonging to that group.
# getBeams() returns a flat list; the /2 suggests two entries (endpoints)
# per beam -- TODO confirm against the mesh API.
bgroupMap = LinkedHashMap()
for i in xrange(liaison.mesh.getBeams().size() / 2):
    bId = liaison.mesh.getBeamGroup(i)
    listBeamId = bgroupMap.get(bId)
    if listBeamId is None:
        # First beam seen for this group: start a new list.  100 is
        # presumably an initial-capacity hint -- verify TIntArrayList docs.
        listBeamId = TIntArrayList(100)
        bgroupMap.put(bId, listBeamId)
    listBeamId.add(i)

# Snapshot the current beam vertex list, then clear the mesh's beams so
# they can be rebuilt group by group.  mapGroupToListOfPolylines is filled
# by the loop that follows.
vertices = ArrayList(liaison.mesh.getBeams())
liaison.mesh.resetBeams()
mapGroupToListOfPolylines = LinkedHashMap()
for bId in bgroupMap.keySet():
    listBeamId = bgroupMap.get(bId)
    listOfPolylines = ArrayList()
    polyline = ArrayList()
# Market Instruments (just Swaps in our example)
def makeSwap(ccy,years,rate,crvName):
  """Build a vanilla fixed-for-floating swap of the given maturity.

  The floating leg pays semi-annually and the fixed leg annually, both
  discounted/projected off the curve named crvName.
  """
  # Semi-annual payment times for the floating leg: 0.5, 1.0, ..., years.
  floatTimes = [0.5 * k for k in range(1, 2 * years + 1)]
  # Annual payment times for the fixed leg: 1, 2, ..., years.
  fixedTimes = range(1, years + 1)
  floatLeg = AnnuityCouponIbor(ccy, floatTimes, crvName, crvName, True)
  fixedLeg = AnnuityCouponFixed(ccy, fixedTimes, rate, crvName, False)
  return FixedCouponSwap(fixedLeg, floatLeg)

# Build the calibration basket: one on-market swap per maturity, each with
# a target value of zero (on-market swaps price to zero by definition).
# NOTE(review): nMats, mats, rates, ccy and crvName come from earlier in
# the script -- confirm against the full file.
mktInstruments = []
mktValues = []
for i in range(nMats):
  iswap = makeSwap(ccy,mats[i],rates[i],crvName)
  mktInstruments.append(iswap)
  mktValues.append(0.0) # !!! By definition, on-market swaps have zero value

# Market Data Bundle likes maps
from java.util import LinkedHashMap
# Per-curve inputs keyed by curve name: node maturities, the value
# interpolator, and the interpolator used for sensitivities.
mapCrvMat = LinkedHashMap()
mapCrvInterp = LinkedHashMap()
mapSensInterp = LinkedHashMap()
aMats = array.array('d',mats) # HashMaps like arrays
mapCrvMat.put(crvName,aMats)
print mapCrvMat.values()
mapCrvInterp.put(crvName,interp)
mapSensInterp.put(crvName,interpSens)

# Bundle everything the curve finder needs: the instruments, their target
# values (zero, being on-market), no known curves (None), and the maps.
crvFinderDataBundle = IR.MultipleYieldCurveFinderDataBundle(mktInstruments, mktValues,None, mapCrvMat, mapCrvInterp, mapSensInterp)

# The function itself
func = IR.MultipleYieldCurveFinderFunction(crvFinderDataBundle, pvCalc);

# Compute Jacobian Analytically. We could also use FD to approximate the sensitivities from our Calculator
jacobian = IR.MultipleYieldCurveFinderJacobian(crvFinderDataBundle,  sensCalc)
# ===== Example #45 (original marker: "예제 #45" / "0") =====
# Recognized plugin file types, keyed by file extension.
JAR_EXTENSION = '.jar'
PLUGINS_TYPE = {JAR_EXTENSION:'java jar file',
                '.class':'java class file',
                '.txt':'macro',
                '.js':'javascript file',
                '.rb':'jruby script',
                '.py':'jython script',
                '.clj':'clojure script'}
# Folder names
PLUGINS_FOLDER = 'plugins'
PLUGINS_MENU_NAME = 'Plugins'

# Wiki location of the generated plugin-list page.
URL = 'http://pacific.mpi-cbg.de/wiki/index.php'
PAGE = 'Template:PluginList'

# Insertion-ordered map of menu path -> plugin entries; '' presumably
# holds top-level entries.  NOTE(review): Jython allows dict-style
# indexing on java.util.LinkedHashMap.
allElements = LinkedHashMap()
allElements[''] = []

# Parse an optional mode flag appearing as the first argument.  Exactly one
# of --upload-to-wiki / --compare-to-wiki is honored; when present it is
# stripped from sys.argv so the positional arguments shift down.
uploadToWiki = len(sys.argv) > 1 and sys.argv[1] == '--upload-to-wiki'
compareToWiki = (not uploadToWiki) and len(sys.argv) > 1 \
    and sys.argv[1] == '--compare-to-wiki'
if uploadToWiki or compareToWiki:
    sys.argv = sys.argv[:1] + sys.argv[2:]

# The Fiji folder defaults to the current directory when not given.
fiji_folder = sys.argv[1] if len(sys.argv) >= 2 else os.path.curdir
# ===== Example #46 (original marker: "예제 #46" / "0") =====
def otherParse(serverhost, mhsid, port,  
  initmodules, accumElem,
  initskips, d2ddbver, logfilepurge, prddir, home,
  extraWEPrec, vtecRequestTime, autoConfigureNotifyTextProd,
  iscRoutingTableAddress, requestedISCsites, requestISC, sendiscOnSave,
  sendiscOnPublish, requestedISCparms, transmitScript):
    if type(serverhost) != str:
        raise TypeError, "GFESUITE_HOST not an str: " + `serverhost`
    if type(mhsid) != str:
        raise TypeError, "GFESUITE_MHSID not an str: " + `mhsid`
    if type(vtecRequestTime) != int:
        raise TypeError, "VTECPartners: VTEC_REMOTE_TABLE_FETCH_TIME " + \
          "not an int: " + `vtecRequestTime`
    if type(port) != int:
        raise TypeError, "GFESUITE_PORT not an int: " + `port`
    initmodules = dictCheck(initmodules, list, str, "INITMODULES")
    accumElem = dictCheck(accumElem, list, str, "D2DAccumulativeElements")
    initskips = dictCheck(initskips, list, int, "INITSKIPS")
    d2ddbver = dictCheck(d2ddbver, int, None, "D2DDBVERSIONS")
    if type(logfilepurge) != int:
        raise TypeError, "LOG_FILE_PURGE_AFTER not an int: " + `logfilepurge`
    if type(autoConfigureNotifyTextProd) != int:
        raise TypeError, "AUTO_CONFIGURE_NOTIFYTEXTPROD not an int: " + \
          `logfilepurge`
    if type(prddir) != str:
        raise TypeError, "GFESUITE_PRDDIR not an str: " + `prddir`
    if type(home) != str:
        raise TypeError, "GFESUITE_HOME not an str: " + `home`
    if type(extraWEPrec) != list:
        raise TypeError, "ExtraWEPrec not an list: " + `extraWEPrec`
    else:
        extraWEPrecision = LinkedHashMap()
        for e in extraWEPrec:
            if type(e) == str:
                extraWEPrecision.put(e, Integer(1))
            elif type(e) == tuple and len(e) == 2 and type(e[0]) == str and \
              type(e[1]) == int:
                extraWEPrecision.put(e[0], Integer(e[1]))
            else:
                raise TypeError, \
                  "Entry in ExtraWEPrec not str or (str, int): " + `e`

    iscRoutingTableAddress = dictCheck(iscRoutingTableAddress,str,str,"ISC_ROUTING_TABLE_ADDRESS")
    #if type(iscRoutingTableAddress) not in [str, types.NoneType]:
    #    raise TypeError, "ISC_ROUTING_TABLE_ADDRESS not None or a str: " + \
    #      `iscRoutingTableAddress`
    #elif iscRoutingTableAddress is None:
    #    iscRoutingTableAddress = ""

    reqISCsites = ArrayList()
    if type(requestedISCsites) not in [list, types.NoneType]:
        raise TypeError, "REQUESTED_ISC_SITES not None or a list: " + \
          `requestedISCsites`
    elif type(requestedISCsites) is list:
        for r in requestedISCsites:
            if type(r) != str:
                raise TypeError, "REQUESTED_ISC_SITES not list of strings: " + \
                  `requestedISCsites`
            else:
                reqISCsites.add(r);

    reqISCparms = ArrayList()
    if type(requestedISCparms) not in [list, types.NoneType]:
        raise TypeError, "REQUESTED_ISC_PARMS not None or a list: " + \
          `requestedISCparms`
    elif type(requestedISCparms) is list:
        for r in requestedISCparms:
            if type(r) != str:
                raise TypeError, "REQUESTED_ISC_PARMS not list of strings: " + \
                  `requestedISCparms`
            else:
                reqISCparms.add(r)

    if type(requestISC) != int:
        raise TypeError, "REQUEST_ISC not an int: " + `requestISC`
    if type(sendiscOnSave) != int:
        raise TypeError, "SEND_ISC_ON_SAVE not an int: " + `sendiscOnSave`
    if type(sendiscOnPublish) != int:
        raise TypeError, "SEND_ISC_ON_PUBLISH not an int: " + `sendiscOnPublish`

    if type(transmitScript) not in [str, types.NoneType]:
        raise TypeError, "TRANSMIT_SCRIPT not None or str: " + `transmitScript`
    elif transmitScript is None:
        transmitScript = ""

    return serverhost, mhsid, \
      port, initmodules, accumElem, \
      initskips, d2ddbver, logfilepurge, prddir, home,\
      extraWEPrecision, vtecRequestTime, \
      autoConfigureNotifyTextProd, \
      iscRoutingTableAddress, reqISCsites, requestISC, sendiscOnSave, \
      sendiscOnPublish, reqISCparms, transmitScript