def genernate_news_content(self, webpart):
    """Render the subject text-news webpart, serving the cached HTML when present."""
    cache_key = "sbj" + str(self.subject.subjectId) + "_" + str(webpart.getSubjectWebpartId()) + self.cacheKeyFix
    cached = cache.get(cache_key)
    if cached is not None:
        # Cache hit: expose the rendered fragment to the request and stop.
        request.setAttribute(cache_key, cached)
        return
    # Pull the six most recent picture-less news items for this subject.
    qry = SiteNewsQuery(""" snews.newsId, snews.title, snews.createDate""")
    qry.subjectId = self.subject.subjectId
    qry.hasPicture = False
    model = HashMap()
    model.put("subject_text_news", qry.query_map(6))
    model.put("subject", self.subject)
    model.put("SubjectRootUrl", self.subjectRootUrl)
    model.put("webpart", webpart)
    model.put("unitId", self.unitId)
    rendered = self.templateProcessor.processTemplate(model, "/WEB-INF/subjectpage/" + self.templateName + "/text_news.ftl", "utf-8")
    request.setAttribute(cache_key, rendered)
    cache.put(cache_key, rendered)
def genernate_preparecourse_content(self, webpart):
    """Render the unit prepare-course webpart, serving the cached HTML when present."""
    cache_key = "unit" + str(self.unit.unitId) + "_" + str(webpart.getUnitWebpartId())
    cached = cache.get(cache_key)
    if cached is not None:
        request.setAttribute(cache_key, cached)
        return
    # Up to ten prepare-course rows for this unit.
    qry = UnitPrepareCourseQuery("pc.prepareCourseId, pc.title, pc.startDate, pc.endDate, u.userId")
    qry.unitId = self.unit.unitId
    model = HashMap()
    model.put("unit", self.unit)
    model.put("webpart", webpart)
    model.put("UnitRootUrl", self.unitRootUrl)
    model.put("unitPrepareCourseList", qry.query_map(10))
    rendered = self.templateProcessor.processTemplate(model, "/WEB-INF/unitspage/" + self.templateName + "/unit_preparecourse.ftl", "utf-8")
    request.setAttribute(cache_key, rendered)
    cache.put(cache_key, rendered)
def genernate_jiaoyanyuan_content(self, webpart):
    """Render the subject commissioner ("jiaoyanyuan") webpart, using the cache when possible."""
    cache_key = "sbj" + str(self.subject.subjectId) + "_" + str(webpart.getSubjectWebpartId()) + self.cacheKeyFix
    cached = cache.get(cache_key)
    if cached is not None:
        request.setAttribute(cache_key, cached)
        return
    model = HashMap()
    model.put("jiaoyanyuan", self.get_subject_comissioner())
    model.put("subject", self.subject)
    model.put("SubjectRootUrl", self.subjectRootUrl)
    model.put("webpart", webpart)
    model.put("unitId", self.unitId)
    rendered = self.templateProcessor.processTemplate(model, "/WEB-INF/subjectpage/" + self.templateName + "/jiaoyanyuan.ftl", "utf-8")
    request.setAttribute(cache_key, rendered)
    cache.put(cache_key, rendered)
def saveRiskInfo(entityName, list, rulecode):
    """Record risk-info rows for every transaction in `list` that matches the
    risk rule identified by `rulecode`.

    Looks the rule up in U_BANK_RISKRULEINFO; when found, builds one risk-info
    map per transaction (copying identifying fields plus the rule's
    level/code/name/type) and bulk-saves them under `entityName`.
    """
    # SECURITY: rulecode is concatenated straight into the HQL string — an
    # injection vector if it can ever carry user input. Switch to a
    # parameterized query if autoService supports one.
    riskList = autoService.findByHql("from U_BANK_RISKRULEINFO v where v.rulecode='" + rulecode + "'")
    # Bug fix: the original tested `riskList != 'null'` (comparison with the
    # literal string 'null'), which is effectively always true; check for a
    # real missing/empty result instead.
    if riskList is not None and riskList.size() > 0:
        riskinfoList = ArrayList()
        riskHM = riskList.get(0)
        for entity in list:
            riskinfoMap = HashMap()
            riskinfoMap.put("uniseqno", entity.get("uniseqno"))
            riskinfoMap.put("trxdate", entity.get("trxdate"))
            riskinfoMap.put("trxtime", entity.get("trxtime"))
            riskinfoMap.put("tellerno", entity.get("tellerno"))
            riskinfoMap.put("custname", entity.get("rcustname"))
            riskinfoMap.put("brno", entity.get("brno"))
            # NOTE(review): hard-coded detection timestamp — looks like
            # leftover test data; confirm whether this should be "now".
            riskinfoMap.put("riskdate", "20090210")
            riskinfoMap.put("risktime", "110911")
            riskinfoMap.put("risklevel", riskHM.get("risklevel"))
            riskinfoMap.put("riskcode", riskHM.get("rulecode"))
            riskinfoMap.put("riskname", riskHM.get("rulename"))
            riskinfoMap.put("risktype", riskHM.get("risktype"))
            riskinfoList.add(riskinfoMap)
        autoService.saveAll(entityName, riskinfoList)
def genernate_daitouren_content(self, webpart):
    """Render the subject expert-leader ("daitouren") webpart, using the cache when possible."""
    cache_key = "sbj" + str(self.subject.subjectId) + "_" + str(webpart.getSubjectWebpartId()) + self.cacheKeyFix
    cached = cache.get(cache_key)
    if cached is not None:
        request.setAttribute(cache_key, cached)
        return
    model = HashMap()
    model.put("expert_user_list", self.get_expert_list())
    model.put("subject", self.subject)
    model.put("SubjectRootUrl", self.subjectRootUrl)
    model.put("webpart", webpart)
    model.put("unitId", self.unitId)
    rendered = self.templateProcessor.processTemplate(model, "/WEB-INF/subjectpage/" + self.templateName + "/daitouren.ftl", "utf-8")
    request.setAttribute(cache_key, rendered)
    cache.put(cache_key, rendered)
def __init__(self, name, results):
    """Build the facet map and top-level facet list for the named facet out of
    a search result set; values containing "/" nest under their parent path."""
    self.__facetMap = HashMap()
    self.__facetList = ArrayList()
    facets = results.getFacets()
    if facets is None:
        return
    facet = facets.get(name)
    if facet is None:
        return
    counts = facet.values()
    for value in counts.keySet():
        node = Facet(name, value, counts.get(value))
        self.__facetMap.put(value, node)
        slash = value.rfind("/")
        if slash == -1:
            # No path separator: a top-level facet value.
            self.__facetList.add(node)
        else:
            # Nested value: attach under its parent path when the parent exists.
            parent = self.__getFacet(value[:slash])
            if parent is not None:
                parent.addSubFacet(node)
def __init__(self, func=None, env=None):
    """Build a function type: empty, from a FunctionDef plus its environment,
    or — when `func` is itself a Type — from a single (from, to) arrow."""
    self.arrows = HashMap()   # observed (argument type -> return type) mappings
    self.cls = None           # owning class; presumably set later for methods — TODO confirm
    self.defaultTypes = List()
    self.selfType = None
    self.func = None
    # Local import, presumably to avoid a circular module dependency.
    from pysonarsq.java.Analyzer import Analyzer
    super(FunType, self).__init__()
    self.env = None
    if isinstance(func, FunctionDef):
        self.func = func
        self.env = env
    elif isinstance(func, Type):
        # Overload: `func` is the argument type and `env` the return type.
        from_, to = func, env
        self.addMapping(from_, to)
    # Inherit symbol table and path from the builtin BaseFunction type.
    # NOTE(review): `Analyzer.self` appears to be the active Analyzer
    # instance stored on the class — verify against the Analyzer module.
    self.getTable().addSuper(Analyzer.self.builtins.BaseFunction.getTable())
    self.getTable().setPath(Analyzer.self.builtins.BaseFunction.getTable().getPath())
def testPerField(self):
    """PerFieldAnalyzerWrapper must route the "special" field to SimpleAnalyzer
    (which lowercases) while other fields keep the WhitespaceAnalyzer default."""
    perField = HashMap()
    perField.put("special", SimpleAnalyzer())
    analyzer = PerFieldAnalyzerWrapper(WhitespaceAnalyzer(), perField)
    text = "Qwerty"
    # Default field: whitespace analyzer leaves case untouched.
    stream = analyzer.tokenStream("field", StringReader(text))
    stream.reset()
    term = stream.getAttribute(CharTermAttribute.class_)
    self.assert_(stream.incrementToken())
    self.assertEqual("Qwerty", term.toString(), "WhitespaceAnalyzer does not lowercase")
    # "special" field: SimpleAnalyzer lowercases.
    stream = analyzer.tokenStream("special", StringReader(text))
    stream.reset()
    term = stream.getAttribute(CharTermAttribute.class_)
    self.assert_(stream.incrementToken())
    self.assertEqual("qwerty", term.toString(), "SimpleAnalyzer lowercases")
def testUpdateAssignment(self):
    """TurnitinReviewServiceImpl.updateAssignment

    TODO: I think this is suppose to actually test updating the due date.
    I believe the occasional running of doAssignments is to get around the
    5 month due date limitation."""
    tiiasnnid = "/unittests/asnnupdate/"+str(uuid.uuid1())
    tiisiteid = str(uuid.uuid1())
    opts = HashMap()
    opts.put('journal_check','1')
    self.tiireview_serv.createAssignment(tiisiteid, tiiasnnid, opts)
    # Sleeps pace the calls against the external Turnitin API.
    Thread.sleep(1000)
    tiiresult = self.tiireview_serv.getAssignment(tiisiteid, tiiasnnid)
    Thread.sleep(1000)
    self.assertEquals(str(tiiresult['object']['searchjournals']),str('1'))
    Thread.sleep(1000)
    self.tiireview_serv.updateAssignment(tiisiteid, tiiasnnid)
    Thread.sleep(1000)
    tiiresult = self.tiireview_serv.getAssignment(tiisiteid, tiiasnnid)
    # The journal_check option should survive the update untouched.
    self.assertEquals(str(tiiresult['object']['searchjournals']),str('1'))
def getUserName(self, userStr):
    """Resolve `userStr` (possibly an email address) to a real username.

    Looks `userStr` up as the "email" attribute in the
    HibernateAuthUserAttribute table; on a hit, the attribute owner's
    username replaces the query string. Returns a JSON snippet
    {"realName": ...} either way.

    Warning: this function does not check if a user exists.
    """
    params = HashMap()
    params.put("key", "email")
    params.put("value", userStr)
    dao = ApplicationContextProvider.getApplicationContext().getBean("hibernateAuthUserAttributeDao")
    matches = dao.query("getUserAttributeByKeyAndValue", params)
    if matches.size() > 0:
        # Attribute found — swap in the owning user's login name.
        realUser = matches.get(0).getUser()
        if realUser is not None:
            userStr = realUser.getUsername()
    # NOTE(review): userStr is concatenated into JSON unescaped; a quote in a
    # username would break the payload — confirm inputs are constrained upstream.
    return '{"realName":"' + userStr + '"}'
def getCurrencyAccounts(self, book):
    """Map each currency to a " : "-joined string of parent-account names,
    considering only active SECURITY accounts with a non-zero balance."""
    import java.util.HashMap as HashMap
    import com.infinitekind.moneydance.model.AccountUtil as AccountUtil
    import com.infinitekind.moneydance.model.AcctFilter as AcctFilter
    import com.infinitekind.moneydance.model.Account.AccountType as AccountType
    accounts = HashMap()
    for acct in AccountUtil.allMatchesForSearch(book.getRootAccount(), AcctFilter.ACTIVE_ACCOUNTS_FILTER):
        # Skip anything that is not a security carrying a balance — no sense
        # slowing everything down (e.g. leftover BONDS mixed in with the STOCKs).
        if acct.getCurrentBalance() == 0 or acct.getAccountType() != AccountType.SECURITY:
            continue
        curr = acct.getCurrencyType()
        existing = accounts.get(curr)  # None when this currency is new
        if existing is None:
            accounts.put(curr, str(acct.getParentAccount()))
        else:
            # Concatenate onto the names already collected for this currency.
            accounts.put(curr, existing + ' : ' + str(acct.getParentAccount()))
    return accounts
def _loader_dict(self, cmd={}):
    """Build one DocumentGenerator per (bucket, scope, collection), keyed by
    the concatenated names, with per-field overrides taken from `cmd`."""
    # NOTE(review): mutable default `cmd={}` — never mutated here, but a None
    # default would be safer; confirm no caller relies on the shared dict.
    self.loader_map = dict()
    for bucket in self.cluster.buckets:
        for scope in bucket.scopes.keys():
            for collection in bucket.scopes[scope].collections.keys():
                # Skip the default collection of the default scope.
                if collection == "_default" and scope == "_default":
                    continue
                # Workload settings: each cmd key overrides the instance default.
                ws = WorkLoadSettings(
                    cmd.get("keyPrefix", self.key),
                    cmd.get("keySize", self.key_size),
                    cmd.get("docSize", self.doc_size),
                    cmd.get("cr", self.create_perc),
                    cmd.get("rd", self.read_perc),
                    cmd.get("up", self.update_perc),
                    cmd.get("dl", self.delete_perc),
                    cmd.get("ex", self.expiry_perc),
                    cmd.get("workers", self.process_concurrency),
                    cmd.get("ops", self.ops_rate),
                    cmd.get("loadType", None),
                    cmd.get("keyType", None),
                    cmd.get("valueType", None),
                    cmd.get("validate", False),
                    cmd.get("gtm", False),
                    cmd.get("deleted", False),
                    cmd.get("mutated", 0))
                # Document id ranges for each operation type (Java HashMap for the SDK).
                hm = HashMap()
                hm.putAll({
                    DRConstants.create_s: self.create_start,
                    DRConstants.create_e: self.create_end,
                    DRConstants.update_s: self.update_start,
                    DRConstants.update_e: self.update_end,
                    DRConstants.expiry_s: self.expire_start,
                    DRConstants.expiry_e: self.expire_end,
                    DRConstants.delete_s: self.delete_start,
                    DRConstants.delete_e: self.delete_end,
                    DRConstants.read_s: self.read_start,
                    DRConstants.read_e: self.read_end})
                dr = DocRange(hm)
                ws.dr = dr
                dg = DocumentGenerator(ws, self.key_type, self.val_type)
                self.loader_map.update({bucket.name + scope + collection: dg})
def execute(self):
    """Render the "prepare course" panel for the user named in the request,
    serving a 4-hour (14400s) file cache when it is still fresh."""
    self.loginName = request.getAttribute("loginName")
    writer = response.getWriter()
    # Load the current user object.
    self.user = user_svc.getUserByLoginName(self.loginName)
    if self.user == None:
        writer.write(u"无法加载当前用户。")
        return
    self.userName = self.user.loginName
    fc = FileCache()
    content = fc.getUserFileCacheContent(self.userName, "user_preparecourse.html", 14400)
    if content != "":
        # Cache hit — write the cached page and stop.
        response.getWriter().write(content)
        fc = None
        return
    # Fetch the activities created by the current user (active only, max 10).
    qry = PrepareCourseQuery(""" pc.createUserId,pc.leaderId,pc.memberCount,pc.articleCount,pc.resourceCount, pc.status,pc.actionCount,pc.topicCount,pc.topicReplyCount,pc.viewCount,pc.startDate,pc.endDate, pc.title, pc.prepareCourseId, pc.createDate, pc.metaSubjectId, pc.gradeId, u.loginName,u.trueName """)
    qry.status = 0
    qry.createUserId = self.user.userId
    course_list = qry.query_map(10)
    templateProcessor = __spring__.getBean("templateProcessor")
    map = HashMap()
    map.put("course_list", course_list)
    map.put("user", self.user)
    map.put("UserSiteUrl", self.getUserSiteUrl())
    content = templateProcessor.processTemplate(map, "/WEB-INF/user/default/user_preparecourse.ftl", "utf-8")
    # Refresh the file cache, then serve the freshly rendered page.
    fc.writeUserFileCacheContent(self.userName, "user_preparecourse.html", content)
    response.getWriter().write(content)
    fc = None
def rawoaasolve(agent, sparkgoal, sparkparams, sparkpattern=None):
    """Call OAA's oaaSolve for `sparkgoal` and deconstruct the answers.

    When `sparkpattern` is None the raw answer list is converted to SPARK
    values; otherwise each answer is unified with the goal and the pattern is
    instantiated under the resulting bindings.

    Raises LowError on malformed or non-unifiable answers, OAAError when the
    solve itself fails.
    """
    oaa = _oaa(agent)
    iclgoal = value_to_icl(sparkgoal)
    iclparams = value_to_icl(sparkparams)
    iclanswers = IclList()
    debug("calling oaaSolve on %s", iclgoal)
    functor = iclgoal.functor
    logInfo("rawoaasolve[%s]" % functor)
    result = oaa.oaaSolve(iclgoal, iclparams, iclanswers)
    logInfo("rawoaasolve complete[%s]" % functor)
    if result:
        debug("oaaSolve returned success: %s", iclanswers)
        if not iclanswers.isList():
            raise LowError(
                "The call to oaaSolve returned a non-IclList answer: %s"
                % iclanswers)
        if sparkpattern is None:
            ans = icl_to_value(iclanswers)
        else:
            bindings = HashMap()
            iclpattern = value_to_icl(sparkpattern)
            anslist = []
            for iclans in iclanswers.iterator():
                bindings.clear()  # reuse one bindings map across answers
                if UNIFIER.matchTerms(iclans, iclgoal, bindings):
                    anslist.append(
                        icl_to_value(UNIFIER.deref(iclpattern, bindings)))
                else:
                    # Bug fix: the original message contained "\query=%s" — a
                    # literal backslash-q where a newline was clearly intended.
                    raise LowError(
                        "The call to oaaSolve returned an answer that doesn't unify with the query:\nans=%s\nquery=%s"
                        % (iclans, iclgoal))
            ans = List(anslist)
        logInfo("rawoaasolve answers deconstructed[%s]" % functor)
        return ans
    else:
        debug("oaaSolve return failure: %s", iclanswers)
        logError("rawoaasolve return failure with answers [%s]" % iclanswers)
        raise OAAError("The call to oaaSolve [goal %s] failed and returned %s"
                       % (functor, iclanswers))
def __activate__(self, context):
    """Poll in-progress curation jobs, publish completed ones, and record
    final statuses. Runs under the "admin" session user and writes a
    plain-text progress report to the response."""
    # Bug fix: the original read `self.None = context["log"]`, which is not a
    # legal attribute assignment (None is a keyword); the logger belongs in
    # `self.log`.
    self.log = context["log"]
    self.systemConfig = context["systemConfig"]
    self.sessionState = context["sessionState"]
    self.response = context["response"]
    self.request = context["request"]
    self.sessionState.set("username", "admin")
    self.writer = self.response.getPrintWriter("text/plain; charset=UTF-8")
    curationJobDao = ApplicationContextProvider.getApplicationContext().getBean("curationJobDao")
    publicationHandler = ApplicationContextProvider.getApplicationContext().getBean("publicationHandler")
    jobs = curationJobDao.query("findInProgressJobs", HashMap())
    self.writer.println(jobs.size())
    for curationJob in jobs:
        if curationJob.getCurationJobId() is not None:
            self.writer.println(curationJob.getCurationJobId())
        else:
            self.writer.println("Null huh")
        jobStatus = self.queryJobStatus(curationJob)
        self.writer.println(jobStatus.toString())
        # "failed" is the fallback when the status field is missing.
        status = jobStatus.getString("failed", "status")
        self.writeResponseToStatusResponseCache(jobStatus.getInteger(None, "job_id"), jobStatus)
        self.writer.println(status)
        if "complete" == status:
            # Completed job: push its items out, then persist the final status.
            publicationHandler.publishRecords(jobStatus.getArray("job_items"))
            curationJob.setStatus(status)
            curationJobDao.create(curationJob)
        elif "failed" == status:
            curationJob.setStatus(status)
            curationJobDao.create(curationJob)
    self.writer.close()
    self.sessionState.remove("username")
def __init__(self, index_dir, search_fields=None, unique_field='uq_id_str',
             boost=None, date_format='%Y-%m-%dT%H:%M:%S'):
    """Constructor of Searcher.

    Parameters
    ----------
    index_dir : string
        The location of lucene index.
    search_fields : list
        A list of field names indicating fields to search on. Defaults to
        ['canonical_url', 'title', 'meta', 'content'].
    unique_field : string
        The field name, on which the duplication should avoid.
    boost : dict
        This dict control the weight when computing score. Defaults to
        dict(canonical_url=4.0, title=8.0, meta=2.0, content=1.0).
    date_format : string
        Convert the string into datetime. Should consistent with the index
        part.
    """
    # Fix: avoid mutable default arguments (a list/dict default is shared
    # across every call); build the defaults per instance instead.
    if search_fields is None:
        search_fields = ['canonical_url', 'title', 'meta', 'content']
    if boost is None:
        boost = dict(canonical_url=4.0, title=8.0, meta=2.0, content=1.0)
    self.index_dir = index_dir
    self.search_fields = search_fields
    # Sort newest-first on the indexed publication date.
    self.sort_by_recent = Sort(
        SortField('date_published', SortField.Type.STRING, True))
    self.store = FSDirectory.open(File(index_dir))
    self.reader = DirectoryReader.open(self.store)
    self.isearcher = IndexSearcher(self.reader)
    self.analyzer = StandardAnalyzer()
    self.dup_filter = DuplicateFilter(unique_field)
    # Per-field boosts must be a Java HashMap of Float for the query parser.
    self.boost_map = HashMap()
    for k, v in boost.iteritems():
        self.boost_map.put(k, Float(v))
    self.mul_parser = MultiFieldQueryParser(search_fields, self.analyzer,
                                            self.boost_map)
    self.date_format = date_format
def genernate_links_content(self, webpart):
    """Render the subject links webpart, serving the cached HTML when present."""
    cache_key = "sbj" + str(self.subject.subjectId) + "_" + str(webpart.getSubjectWebpartId()) + self.cacheKeyFix
    cached = cache.get(cache_key)
    if cached is not None:
        request.setAttribute(cache_key, cached)
        return
    model = HashMap()
    site_links = self.siteLinksService.getSiteLinksList("subject", self.subject.subjectId)
    if site_links is not None:
        model.put("links", site_links)
    model.put("subject", self.subject)
    model.put("SubjectRootUrl", self.subjectRootUrl)
    model.put("webpart", webpart)
    model.put("unitId", self.unitId)
    rendered = self.templateProcessor.processTemplate(model, "/WEB-INF/subjectpage/" + self.templateName + "/links.ftl", "utf-8")
    request.setAttribute(cache_key, rendered)
    cache.put(cache_key, rendered)
def genernate_group_content(self, webpart):
    """Render the unit groups webpart, serving the cached HTML when present."""
    cache_key = "unit" + str(self.unit.unitId) + "_" + str(webpart.getUnitWebpartId())
    cached = cache.get(cache_key)
    if cached is not None:
        request.setAttribute(cache_key, cached)
        return
    # Up to ten groups belonging to this unit.
    qry = UnitGroupQuery("ug.groupName,ug.groupTitle")
    qry.unitId = self.unit.unitId
    model = HashMap()
    model.put("unit", self.unit)
    model.put("webpart", webpart)
    model.put("UnitRootUrl", self.unitRootUrl)
    model.put("unitGroupList", qry.query_map(10))
    rendered = self.templateProcessor.processTemplate(model, "/WEB-INF/unitspage/" + self.templateName + "/unit_group.ftl", "utf-8")
    request.setAttribute(cache_key, rendered)
    cache.put(cache_key, rendered)
def start_pr_release(repo_full_name, pr_number, pr_title, comment):
    """Start an XL Release run from the first template tagged
    'pull_request_merger' to merge the given pull request."""
    tag = 'pull_request_merger'
    pr_templates = templateApi.getTemplates(tag)
    if not pr_templates:
        raise Exception('Could not find any templates by tag [pull_request_merger]. '
                        'Did the xlr-development-workflow-plugin initializer run?')
    if len(pr_templates) > 1:
        logger.warn("Found more than one template with tag '%s', using the first one" % tag)
    template_id = pr_templates[0].id
    params = StartRelease()
    params.setReleaseTitle('Merge PR #%s: %s' % (pr_number, pr_title))
    # Release variables are passed as a Java HashMap of strings.
    release_vars = HashMap()
    for key, value in (('${pull_request_number}', pr_number),
                       ('${pull_request_title}', pr_title),
                       ('${repository_full_name}', repo_full_name),
                       ('${pull_request_comment}', comment)):
        release_vars.put(key, '%s' % value)
    params.setReleaseVariables(release_vars)
    started_release = templateApi.start(template_id, params)
    response.entity = started_release
    logger.info("Started release %s to merge pull request %s" % (started_release.getId(), pr_number))
def getMintLabels(self, urlName, key, suffix):
    """Resolve the ids stored under `key` into display labels via the Mint
    lookup service at `urlName`. Returns None when no ids are found."""
    valList = self.getList(key)
    self.log.debug(valList.toString())
    # Collect the id stored under `suffix` for every entry, comma-separated.
    id_parts = []
    for entryKey in valList.keySet():
        entry = valList.get(entryKey)
        id_parts.append("%s" % entry.get(suffix))
    ids = ",".join(id_parts)
    if ids == "":
        return None
    mapIds = HashMap()
    mapIds.put("id", ids)
    labelsMint = MintLookupHelper.get(self.systemConfig, urlName, mapIds)
    self.log.debug(labelsMint.getJsonArray().toString())
    labels = ArrayList()
    for label in labelsMint.getJsonArray():
        labels.add(JsonSimple(label).getString("", "label"))
    return labels
def start_pr_release(
    proj_name,
    repo_name,
    pr_number,
    pr_title,
    comment,
    source_hash,
    target_hash,
    tag="pull_request_merger",
):
    """Start an XL Release run from the first template carrying `tag`,
    seeding it with the pull request's metadata and commit hashes."""
    pr_templates = templateApi.getTemplates(tag)
    if not pr_templates:
        raise Exception(
            "Could not find any templates by tag [pull_request_merger]. "
            "Did the xlr-development-workflow-plugin initializer run?"
        )
    if len(pr_templates) > 1:
        logger.warn(
            "Found more than one template with tag '%s', using the first one" % tag
        )
    template_id = pr_templates[0].id
    params = StartRelease()
    params.setReleaseTitle("Pull Request #%s: %s" % (pr_number, pr_title))
    # Release variables are passed as a Java HashMap of strings.
    release_vars = HashMap()
    for key, value in (
        ("pull_request_number", pr_number),
        ("pull_request_title", pr_title),
        ("repository_name", repo_name),
        ("pull_request_comment", comment),
        ("proj_name", proj_name),
        ("fromHash", source_hash),
        ("toHash", target_hash),
    ):
        release_vars.put(key, "%s" % value)
    params.setReleaseVariables(release_vars)
    started_release = templateApi.start(template_id, params)
    response.entity = started_release
    logger.info(
        "Started release %s for Pull Request %s"
        % (started_release.getId(), pr_number)
    )
def execute(self):
    """List all questions attached to a parent object, paginated and rendered
    inside the parent's page frame."""
    self.pluginService = __spring__.getBean("pluginService")
    # Bail out when an admin has disabled the Q&A plugin.
    if self.pluginService.checkPluginEnabled("questionanswer") == False:
        request.setAttribute("message",u"该插件已经被管理员禁用。")
        return "/WEB-INF/mod/show_text.ftl"
    if self.parentGuid == "" or self.parentType == "":
        return "/WEB-INF/mod/questionanswer/not_found.ftl"
    # Sanitize the page parameter; anything non-numeric falls back to page 1.
    pageIndex = self.params.safeGetStringParam("page")
    if pageIndex.isdigit() == False:
        pageIndex = "1"
    qry = QuestionQuery(""" q.questionId,q.topic,q.createDate,q.createUserId,q.createUserName, q.objectGuid,q.createUserId, q.createUserName """)
    qry.parentGuid = self.parentGuid
    pager = self.params.createPager()
    pager.itemName = u"问题"
    pager.itemUnit = u"个"
    pager.pageSize = 20
    pager.setCurrentPage(int(pageIndex))
    pager.totalRows = qry.count()
    q_list = qry.query_map(pager)
    map = HashMap()
    map.put("SiteUrl",self.pageFrameService.getSiteUrl())
    map.put("UserMgrUrl",self.pageFrameService.getUserMgrUrl())
    map.put("q_list", q_list)
    map.put("pager", pager)
    map.put("parentGuid", self.parentGuid)
    map.put("parentType", self.parentType)
    pagedata = self.pageFrameService.transformTemplate(map, "/WEB-INF/mod/questionanswer/listall.ftl")
    # Inject the rendered list and title into the frame placeholders.
    page_frame = self.pageFrameService.getFramePage(self.parentGuid,self.parentType)
    page_frame = page_frame.replace("[placeholder_content]",pagedata)
    page_frame = page_frame.replace("[placeholder_title]",u"全部问题列表")
    self.writeToResponse(page_frame)
def nx_update_custom_attr(self):
    '''Check _categoryName on node, if it is not set to the current
    value (or missing) update it. Alarm-suppression flags are set for
    workstation/portable/printer categories.'''
    required_attrs = {
        '_categoryName': self.category,
        'disableUnreachableAlarms': str(int(self.category in (self.WORKSTATION, self.PORTABLE, self.PRINTER))),
        'disableInterfaceAlarms': str(int(self.category in (self.WORKSTATION, self.PORTABLE, self.PRINTER)))
    }
    # Bug fix: iterate over a snapshot of the keys — the original deleted
    # entries from required_attrs while iterating the dict itself, which
    # raises "dictionary changed size during iteration".
    for attr in list(required_attrs):
        ca = self.node.customAttributes.get(attr, None)
        if ca and ca.value == required_attrs[attr]:
            # Already up to date on the node; nothing to push for this key.
            del required_attrs[attr]
    if required_attrs:
        # Push only the attributes that were missing or stale.
        md = NXCObjectModificationData(self.node.getObjectId())
        attr_map = HashMap(self.node.getCustomAttributes())
        flags = CustomAttribute.INHERITABLE
        for attr in required_attrs:
            attr_map.put(attr, CustomAttribute(str(required_attrs[attr]), flags, 0))
        md.setCustomAttributes(attr_map)
        self.session.modifyObject(md)
def scan_request(path=None, monitorsPerPoint=None, monitorsPerScan=None, det=None, file=None, allow_preprocess=False):
    """Create a ScanRequest object with the given configuration.

    See the mscan() docstring for usage.

    Raises:
        ValueError: if no scan path is given.
    """
    # Fix: validate with an explicit check instead of `assert` — asserts are
    # silently stripped when Python runs with optimizations (-O), which would
    # have skipped this validation entirely.
    if path is None:
        raise ValueError('Scan request must have a scan path.')
    # The effect of the following three lines is to make square brackets
    # optional when calling this function with length-1 lists. I.e. we can do
    # either scan([grid(...)], ...) or scan(grid(...), ...). Also _stringify
    # the monitors so users can pass either a monitor name in quotes or a
    # scannable object from the Jython namespace.
    scan_paths = _listify(path)
    monitorNamesPerPoint = ArrayList(map(_stringify, _listify(monitorsPerPoint)))
    monitorNamesPerScan = ArrayList(map(_stringify, _listify(monitorsPerScan)))
    detectors = _listify(det)
    (scan_path_models, _) = zip(*scan_paths)  # zip(* == unzip(
    # ScanRequest expects CompoundModel
    cmodel = CompoundModel()
    for (model, rois) in scan_paths:
        cmodel.addData(model, rois)
    # (Again, use a HashMap, not a Python dict.)
    detector_map = HashMap()
    for (name, model) in detectors:
        detector_map[name] = model
    return _instantiate(ScanRequest,
                        {'compoundModel': cmodel,
                         'filePath': file,
                         'monitorNamesPerPoint': monitorNamesPerPoint,
                         'monitorNamesPerScan': monitorNamesPerScan,
                         'detectors': detector_map,
                         'ignorePreprocess': not allow_preprocess})
def testCheckAgainstInstitutionRepository(self):
    """ institution_check / searchinstitution values of 0 to not check against institutional repository, 1 check against it. """
    opts = HashMap()
    opts.put('institution_check','1')
    tiiasnnid = "/unittests/useinstitution/"+str(uuid.uuid1())
    tiisiteid = str(uuid.uuid1())
    # Test creating an assignment checked against the institutional repository.
    self.tiireview_serv.createAssignment(tiisiteid, tiiasnnid, opts)
    tiiresult = self.tiireview_serv.getAssignment(tiisiteid, tiiasnnid)
    self.assertEquals(str(tiiresult['object']['searchinstitution']),str('1'))
    # Test creating an assignment NOT checked against the institutional repository.
    opts.put('institution_check','0')
    tiiasnnid = "/unittests/noinstitution/"+str(uuid.uuid1())
    self.tiireview_serv.createAssignment(tiisiteid, tiiasnnid, opts)
    tiiresult = self.tiireview_serv.getAssignment(tiisiteid, tiiasnnid)
    self.assertEquals(str(tiiresult['object']['searchinstitution']),str('0'))
def __init__(self, parent=None, type_=None):
    """Create a scope. When `parent` is itself a Scope and no type is given,
    acts as a copy constructor over that scope's state."""
    # stays null for most scopes (mem opt)
    # all are non-null except global table
    # link to the closest non-class scope, for lifting functions out
    # ....
    if not hasattr(self, 'scopeType'):
        self.scopeType = None
    self.supers = List()
    self.globalNames = Set()
    self.type_ = None
    self.path = ""
    self.parent = parent
    self.setScopeType(type_)
    self.forwarding = None
    self.table = Map()
    if isinstance(parent, Scope) and type_ is None:
        s = parent
        # case of creating a new scope from an existing one
        if s.table is not None:
            # Shallow-copy the symbol table so the new scope can diverge.
            self.table = HashMap()
            self.table.update(s.table)
        self.parent = s.parent
        self.setScopeType(s.scopeType)
        # NOTE(review): supers/globalNames/type_ are shared (not copied) with
        # the source scope — confirm aliasing is intentional.
        self.forwarding = s.forwarding
        self.supers = s.supers
        self.globalNames = s.globalNames
        self.type_ = s.type_
        self.path = s.path
    elif parent is not None:
        self.parent = parent
        self.setScopeType(type_)
    # CLASS scopes forward to the enclosing scope's forwarder; every other
    # scope forwards to itself.
    if type_ == self.ScopeType.CLASS:
        self.forwarding = (None if parent is None else parent.getForwarding())
    else:
        self.forwarding = self
def execute(self):
    """List every vote/survey attached to a parent object, paginated and
    rendered inside the parent's page frame."""
    if self.parentGuid == "" or self.parentType == "":
        self.addActionError(u"无效的标识。")
        return self.ERROR
    # Sanitize the page parameter; anything non-numeric falls back to page 1.
    pageIndex = self.params.safeGetStringParam("page")
    if pageIndex.isdigit() == False:
        pageIndex = "1"
    qry = VoteQuery(""" vote.voteId,vote.title,vote.createDate,vote.endDate """)
    qry.parentGuid = self.parentGuid
    qry.parentObjectType = self.parentType
    pager = self.params.createPager()
    pager.itemName = u"调查"
    pager.itemUnit = u"个"
    pager.pageSize = 20
    pager.setCurrentPage(int(pageIndex))
    pager.totalRows = qry.count()
    vote_list = qry.query_map(pager)
    map = HashMap()
    map.put("SiteUrl", self.pageFrameService.getSiteUrl())
    map.put("UserMgrUrl", self.pageFrameService.getUserMgrUrl())
    map.put("vote_list", vote_list)
    map.put("pager", pager)
    map.put("loginUser", self.loginUser)
    map.put("parentGuid", self.parentGuid)
    map.put("parentType", self.parentType)
    pagedata = self.pageFrameService.transformTemplate(map, "/WEB-INF/mod/vote/morevote.ftl")
    # Inject the rendered list and title into the frame placeholders.
    page_frame = self.pageFrameService.getFramePage(self.parentGuid, self.parentType)
    page_frame = page_frame.replace("[placeholder_content]", pagedata)
    page_frame = page_frame.replace("[placeholder_title]", u"全部调查投票")
    self.writeToResponse(page_frame)
def execute(self):
    """Render a user's guestbook ("leave word") panel, serving a 60-second
    file cache when it is still fresh."""
    self.userName = request.getAttribute("loginName")
    if self.userName == None or self.userName == "":
        response.getWriter().write(u"没有该用户。")
        return
    user = self.userService.getUserByLoginName(self.userName)
    if user == None:
        response.getWriter().write(u"不能加载当前用户。")
        return
    fc = FileCache()
    content = fc.getUserFileCacheContent(self.userName, "user_leaveword.html", 60)
    if content != "":
        # Cache hit — write the cached page and stop.
        response.getWriter().write(content)
        fc = None
        return
    # Number of messages to show; defaults to 10 when absent/zero.
    count = self.params.safeGetIntParam("count")
    if count == 0:
        count = 10
    qry = LeaveWordQuery(" lwd.userId,lwd.createDate, lwd.loginName, lwd.title, lwd.content")
    qry.objId = user.userId
    qry.objType = 1
    qry.orderType = 0
    leaveword_list = qry.query_map(count)
    templateProcessor = __spring__.getBean("templateProcessor")
    map = HashMap()
    map.put("user", user)
    map.put("leaveword_list", leaveword_list)
    map.put("UserSiteUrl", self.getUserSiteUrl())
    content = templateProcessor.processTemplate(map, "/WEB-INF/user/default/user_leaveword.ftl", "utf-8")
    # Refresh the file cache, then serve the freshly rendered page.
    fc.writeUserFileCacheContent(self.userName, "user_leaveword.html", content)
    response.getWriter().write(content)
    fc = None
def checkLock(Framework):
    """Validate (and, when expired, re-acquire) the scanner-node lock for this
    probe/job.

    Returns 1 when this probe/job holds or re-acquires the lock, 0 when no
    lock exists or another inventory job owns it, and
    ScannerNodeLockedByCallHome when a call-home job owns it."""
    probe = CollectorsParameters.getValue(CollectorsParameters.KEY_PROBE_MGR_IP)
    if (probe is None) or (len(str(probe)) == 0):
        # Fall back to the probe name when no manager IP is configured.
        logger.debug('Probe manager ip is not specified in the DiscoveryProbe.properties file, using probe ID')
        probe = CollectorsParameters.getValue(CollectorsParameters.KEY_COLLECTORS_PROBE_NAME)
    jobType = INVENTORY_JOB_TYPE
    jobId = Framework.getDiscoveryJobId()
    lockTime = System.currentTimeMillis()
    lockExpiration = System.currentTimeMillis() + LOCK_AGE_PERIOD_MILLISECONDS
    # The lock this probe/job would hold.
    lock = Lock(probe, jobType, jobId, lockTime, lockExpiration)
    logger.debug('Checking remote lock with current lock:', str(lock.getLockInfo()))
    triggerid = Framework.getTriggerCIData('id')
    logger.debug('Checking lock for probe ', probe, ' and jobid ', jobId, ' and triggerid ', triggerid)
    client = Framework.getConnectedClient()
    options = getClientOptionsMap(client)
    lockOption = options.get(ScannerNodeLock)
    if (lockOption is None) or (len(lockOption.strip()) == 0):
        # No lock recorded on the node at all.
        logger.debug('Lock on scanner node for probe "' + lock.probe + '" and job "' + lock.jobId + '" is not exists')
        return 0
    remoteLock = extractLock(lockOption)
    logger.debug('Found remote lock:', str(remoteLock.getLockInfo()))
    if remoteLock.isLockExpired():
        # Expired lock: take ownership by writing our own lock onto the node.
        logger.debug('Lock on remote node is already expired, renewing lock on the node')
        options = HashMap()
        options.put(ScannerNodeLock, lock.getLockInfo())
        client.setOptionsMap(options)
    elif not lock.isSameLock(remoteLock):
        # Someone else owns the lock; call-home jobs get a distinct result code.
        logger.debug(
            'Lock on remote node is owned by another probe/job (' + remoteLock.probe + '/' + remoteLock.jobId + ')')
        if remoteLock.jobType == CALLHOME_JOB_TYPE:
            return ScannerNodeLockedByCallHome
        return 0
    return 1
def execute(self):
    """Render the collaboration groups a user has joined, serving a 4-hour
    (14400s) file cache when it is still fresh."""
    self.userName = request.getAttribute("UserName")
    fc = FileCache()
    content = fc.getUserFileCacheContent(self.userName, "user_joined_groups.html", 14400)
    if content != "":
        # Cache hit — write the cached page and stop.
        response.getWriter().write(content)
        fc = None
        return
    # Number of groups to show; defaults to 10 when absent/zero.
    count = self.params.safeGetIntParam("count")
    if count == 0:
        count = 10
    user = self.userService.getUserByLoginName(self.userName)
    if self.canVisitUser(user) == False:
        response.writer.println(u"用户 " + self.userName + u" 不存在 ")
        return
    # Fetch the list of collaboration groups the user has joined.
    qry = GroupMemberQuery(""" gm.id, gm.group, g.groupName, g.groupId, g.groupTitle, g.userCount """)
    qry.userId = user.userId
    qry.memberStatus = 0
    qry.groupStatus = 0
    qry.orderType = 0
    group_list = qry.query_map(count)
    templateProcessor = __spring__.getBean("templateProcessor")
    map = HashMap()
    map.put("user", user)
    map.put("group_list", group_list)
    map.put("UserSiteUrl", self.getUserSiteUrl())
    content = templateProcessor.processTemplate(map, "/WEB-INF/user/default/user_joined_groups.ftl", "utf-8")
    # Refresh the file cache, then serve the freshly rendered page.
    fc.writeUserFileCacheContent(self.userName, "user_joined_groups.html", content)
    response.getWriter().write(content)
    fc = None