class ErrorResolver:
    """Maps regex patterns over error messages to ErrorMessageConfig entries.

    Patterns live in a LinkedHashMap so they are tried in the order they
    were registered; the first matching pattern wins.
    """

    def __init__(self, data=None):
        # LinkedHashMap preserves insertion order, which gives the
        # pattern lookup a predictable priority.
        self.configsMap = LinkedHashMap()
        if data:
            self.configsMap.putAll(data)
        self.initDefaultConfigs()

    def addConfig(self, pattern, config):
        """Register config under the given regex pattern."""
        self.configsMap.put(pattern, config)

    def _findPattern(self, msg):
        # First registered pattern that matches msg, else None.
        # Matching is case-insensitive and multi-line.
        for pattern in self.configsMap.keySet():
            if re.search(pattern, msg, re.I | re.M):
                return pattern
        return None

    def hasConfig(self, msg):
        """Return 1 when some registered pattern matches msg, else None."""
        if self._findPattern(msg) is not None:
            return 1

    def getConfig(self, msg):
        """Return the config of the first pattern matching msg, else None."""
        pattern = self._findPattern(msg)
        if pattern is not None:
            return self.configsMap.get(pattern)

    def getDefaultConfig(self):
        return self.defaultConfig

    def copy(self):
        """Return a fresh resolver seeded with this resolver's pattern map."""
        return ErrorResolver(self.configsMap)

    def __len__(self):
        return self.configsMap.size()

    def __getitem__(self, key):
        return self.configsMap.get(key)

    def __setitem__(self, key, item):
        self.addConfig(key, item)

    def keys(self):
        return self.configsMap.keySet()

    def initDefaultConfigs(self):
        """Build the fallback configs used when no registered pattern matches."""
        self.defaultConfigWithDetails = ErrorMessageConfig(
            ERROR_GENERIC_WITH_DETAILS,
            errorcodes.INTERNAL_ERROR_WITH_PROTOCOL_DETAILS)
        self.defaultConfig = ErrorMessageConfig(
            ERROR_CONNECTION_FAILED,
            errorcodes.CONNECTION_FAILED)
        self.defaultConfigNoProtocol = ErrorMessageConfig(
            ERROR_CONNECTION_FAILED_NO_PROTOCOL,
            errorcodes.CONNECTION_FAILED_NO_PROTOCOL)
        self.defaultConfigNoProtocolWithDetails = ErrorMessageConfig(
            ERROR_CONNECTION_FAILED_NO_PROTOCOL_WITH_DETAILS,
            errorcodes.CONNECTION_FAILED_NO_PROTOCOL_WITH_DETAILS)
def getSuggestedNames(self):
    """Find author records whose titles resemble this package's name.

    Builds a Solr query from common citation forms of the name (boosted
    2.5) plus a loose word-by-word search over the package title (boosted
    0.5), then groups the hits by author title.

    Returns a LinkedHashMap of author title -> ArrayList of result docs.
    Side effect: stores the result set's max score (never below 1.0) in
    self.__maxScore.
    """
    surname = self.__metadata.getList("surname").get(0)
    givenName = self.__metadata.getList("firstName").get(0)
    givenInitial = givenName[0].upper()
    middleName = self.__metadata.getList("secondName")
    if not middleName.isEmpty():
        middleName = middleName.get(0)
    # Common citation forms of the name, most specific first
    candidateNames = []
    if middleName and middleName != "":
        middleInitial = middleName[0].upper()
        candidateNames.append("%s, %s. %s." % (surname, givenInitial, middleInitial))
        candidateNames.append("%s, %s %s." % (surname, givenName, middleInitial))
        candidateNames.append("%s, %s %s" % (surname, givenName, middleName))
        candidateNames.append("%s %s %s" % (givenName, middleName, surname))
    candidateNames.append("%s, %s." % (surname, givenInitial))
    candidateNames.append("%s, %s" % (surname, givenName))
    candidateNames.append("%s %s" % (givenName, surname))
    nameQuery = '" OR dc_title:"'.join(candidateNames)
    # General word search built from each part of the package title
    titleWords = [word for word in self.getPackageTitle().split(" ") if len(word) > 0]
    wordQuery = " OR dc_title:".join(titleWords)
    req = SearchRequest('(dc_title:"%s")^2.5 OR (dc_title:%s)^0.5' % (nameQuery, wordQuery))
    self.log.info("suggestedNames query={}", req.query)
    req.setParam("fq", 'recordtype:"author"')
    req.addParam("fq", 'item_type:"object"')
    req.setParam("rows", "9999")
    req.setParam("fl", "score")
    req.setParam("sort", "score desc")
    # Make sure 'fq' has already been set in the session
    ##security_roles = self.authentication.get_roles_list();
    ##security_query = 'security_filter:("' + '" OR "'.join(security_roles) + '")'
    ##req.addParam("fq", security_query)
    searchOutput = ByteArrayOutputStream()
    self.services.getIndexer().search(req, searchOutput)
    result = JsonConfigHelper(ByteArrayInputStream(searchOutput.toByteArray()))
    docs = result.getJsonList("response/docs")
    # Group hits by author title, preserving the ranking order
    groupedByAuthor = LinkedHashMap()
    for doc in docs:
        authorName = doc.getList("dc_title").get(0)
        if not groupedByAuthor.containsKey(authorName):
            groupedByAuthor.put(authorName, ArrayList())
        groupedByAuthor.get(authorName).add(doc)
    self.__maxScore = max(1.0, float(result.get("response/maxScore")))
    return groupedByAuthor
class WorkflowsData:
    """Exposes the upload workflows that the current user's roles permit."""

    def __activate__(self, context):
        self.roles = context["page"].authentication.get_roles_list()
        self.config = context["systemConfig"]
        allWorkflows = JsonSimple.toJavaMap(self.config.getObject(["uploader"]))
        self.uploaders = LinkedHashMap()
        for workflowId in allWorkflows.keySet():
            details = allWorkflows.get(workflowId)
            # Only workflows that define an upload template are uploaders
            if not details.getString("", ["upload-template"]):
                continue
            for role in details.getArray(["security"]):
                if str(role) in self.roles:
                    self.uploaders.put(workflowId, details)

    def getUploaders(self):
        return self.uploaders

    def getStageInfo(self, workflowId):
        """Load the stage list from the workflow's JSON config file."""
        uploader = self.uploaders.get(workflowId)
        rawPath = uploader.getString("", ["json-config"])
        configFile = File(StrSubstitutor.replaceSystemProperties(rawPath))
        config = JsonSimple(configFile)
        return StageInfo(config.getArray(["stages"]))
algo.setAnalyticMetric(DistanceMetric(options.size, options.point_metric_file)) elif setAnalytic: algo.setAnalyticMetric(RemeshMetric()); algo.compute(); #MeshWriter.writeObject3D(algo.getOutputMesh(), outDir, String()) if options.recordFile: liaison.getMesh().getTrace().finish() # Now compute beams bgroupMap = LinkedHashMap() newMesh = algo.getOutputMesh() #print "beams size: "+str(mesh.getBeams().size()) for i in xrange(newMesh.getBeams().size() / 2): bId = newMesh.getBeamGroup(i) listBeamId = bgroupMap.get(bId) if listBeamId is None: listBeamId = TIntArrayList(100) bgroupMap.put(bId, listBeamId) listBeamId.add(i) vertices = ArrayList(newMesh.getBeams()) newMesh.resetBeams() mapGroupToListOfPolylines = LinkedHashMap() for bId in bgroupMap.keySet(): listBeamId = bgroupMap.get(bId) listOfPolylines = ArrayList() polyline = ArrayList() lastVertex = None for i in xrange(listBeamId.size()): b = listBeamId.get(i)
outDir = args[1] if options.size: size = options.size else: size = 1.0 mtb = MeshTraitsBuilder.getDefault3D() mtb.addNodeList() mesh = Mesh(mtb) MeshReader.readObject3D(mesh, xmlDir) bgroupMap = LinkedHashMap() #print "beams size: "+str(mesh.getBeams().size()) for i in xrange(mesh.getBeams().size() / 2): bId = mesh.getBeamGroup(i) listBeamId = bgroupMap.get(bId) if listBeamId is None: listBeamId = TIntArrayList(100) bgroupMap.put(bId, listBeamId) listBeamId.add(i) vertices = ArrayList(mesh.getBeams()) mesh.resetBeams() mapGroupToListOfPolylines = LinkedHashMap() for bId in bgroupMap.keySet(): listBeamId = bgroupMap.get(bId) listOfPolylines = ArrayList() polyline = ArrayList() lastVertex = None for i in xrange(listBeamId.size()): b = listBeamId.get(i)
def getSuggestedNames(self):
    """Suggest author records matching this package's author name.

    A high-boost phrase query over common citation forms of the name is
    combined with a low-boost word query over the package title; the
    matching documents are returned grouped by their author title in a
    LinkedHashMap (title -> ArrayList of docs). The result set's max
    score, floored at 1.0, is cached in self.__maxScore.
    """
    surname = self.__metadata.getList("surname").get(0)
    firstName = self.__metadata.getList("firstName").get(0)
    initial = firstName[0].upper()
    second = self.__metadata.getList("secondName")
    if not second.isEmpty():
        second = second.get(0)
    forms = []
    if second and second != "":
        # Variants that include the middle name / initial
        secondInitial = second[0].upper()
        forms += [
            "%s, %s. %s." % (surname, initial, secondInitial),
            "%s, %s %s." % (surname, firstName, secondInitial),
            "%s, %s %s" % (surname, firstName, second),
            "%s %s %s" % (firstName, second, surname),
        ]
    forms += [
        "%s, %s." % (surname, initial),
        "%s, %s" % (surname, firstName),
        "%s %s" % (firstName, surname),
    ]
    phraseQuery = '" OR dc_title:"'.join(forms)
    # Loose search over the individual words of the package title
    words = [part for part in self.getPackageTitle().split(" ") if len(part) > 0]
    wordQuery = " OR dc_title:".join(words)
    req = SearchRequest('(dc_title:"%s")^2.5 OR (dc_title:%s)^0.5' % (phraseQuery, wordQuery))
    self.log.info("suggestedNames query={}", req.query)
    req.setParam("fq", 'recordtype:"author"')
    req.addParam("fq", 'item_type:"object"')
    req.setParam("rows", "9999")
    req.setParam("fl", "score")
    req.setParam("sort", "score desc")
    # Make sure 'fq' has already been set in the session
    ##security_roles = self.authentication.get_roles_list();
    ##security_query = 'security_filter:("' + '" OR "'.join(security_roles) + '")'
    ##req.addParam("fq", security_query)
    resultStream = ByteArrayOutputStream()
    indexer = self.services.getIndexer()
    indexer.search(req, resultStream)
    result = JsonConfigHelper(ByteArrayInputStream(resultStream.toByteArray()))
    byAuthor = LinkedHashMap()
    for doc in result.getJsonList("response/docs"):
        title = doc.getList("dc_title").get(0)
        bucket = byAuthor.get(title)
        if bucket is None:
            bucket = ArrayList()
            byAuthor.put(title, bucket)
        bucket.add(doc)
    self.__maxScore = max(1.0, float(result.get("response/maxScore")))
    return byAuthor
def getSuggestedNames(self):
    """Suggest author records matching this package's author name.

    Builds a Solr query from common citation forms of the name (boost 2.5)
    plus individual words of the package title (boost 0.5), excluding ids
    already linked in the manifest. Docs with rank exactly 100.00 on an
    unmodified record are collected separately and handed to
    self.__autoSaveExactRecord; the remaining unlinked docs are returned
    grouped by author title (LinkedHashMap: title -> ArrayList of docs).
    Side effects: sets self.__maxScore; calls self.__isLinked when any
    results were found.
    """
    # search common forms
    lookupNames = []
    surname = self.__metadata.getList("surname").get(0)
    firstName = self.__metadata.getList("firstName").get(0)
    firstInitial = firstName[0].upper()
    secondName = self.__metadata.getList("secondName")
    if not secondName.isEmpty():
        secondName = secondName.get(0)
    if secondName and secondName != "":
        # Forms that include the middle name / initial
        secondInitial = secondName[0].upper()
        lookupNames.append("%s, %s. %s." % (surname, firstInitial, secondInitial))
        lookupNames.append("%s, %s %s." % (surname, firstName, secondInitial))
        lookupNames.append("%s, %s %s" % (surname, firstName, secondName))
        lookupNames.append("%s %s %s" % (firstName, secondName, surname))
    lookupNames.append("%s, %s." % (surname, firstInitial))
    lookupNames.append("%s, %s" % (surname, firstName))
    lookupNames.append("%s %s" % (firstName, surname))
    query = '" OR dc_title:"'.join(lookupNames)
    # general word search from each part of the name
    parts = [p for p in self.getPackageTitle().split(" ") if len(p) > 0]
    query2 = " OR dc_title:".join(parts)
    # filter out the linked citation
    linkedCitations = self.__manifest.getList("//children//id")
    query3 = ""
    if linkedCitations:
        query3 = " OR ".join(linkedCitations)
        query3 = " AND -id:(%s)" % query3
    req = SearchRequest('(dc_title:"%s")^2.5 OR (dc_title:%s)^0.5%s' % (query, query2, query3))
    self.log.info("suggestedNames query={}", req.query)
    req.setParam("fq", 'recordtype:"author"')
    req.addParam("fq", 'item_type:"object"')
    req.setParam("rows", "9999")
    req.setParam("fl", "score")
    req.setParam("sort", "score desc")
    # Make sure 'fq' has already been set in the session
    ##security_roles = self.authentication.get_roles_list();
    ##security_query = 'security_filter:("' + '" OR "'.join(security_roles) + '")'
    ##req.addParam("fq", security_query)
    out = ByteArrayOutputStream()
    self.__indexer.search(req, out)
    result = JsonConfigHelper(ByteArrayInputStream(out.toByteArray()))
    #self.log.info("result={}", result.toString())
    docs = result.getJsonList("response/docs")
    # author title -> ArrayList of exact (rank 100) matches
    exactMatchRecords = LinkedHashMap()
    # author title -> ArrayList of remaining candidates
    # NOTE(review): local shadows the builtin 'map'
    map = LinkedHashMap()
    idList = []
    for doc in docs:
        authorName = doc.getList("dc_title").get(0)
        rank = self.getRank(doc.getList("score").get(0))
        # NOTE(review): local shadows the builtin 'id'
        id = doc.get("id")
        idList.append(id)
        #try to do automatch
        # NOTE(review): exact float comparison — assumes getRank returns
        # precisely 100.00 for a perfect score; confirm rounding upstream
        if float(rank) == 100.00 and self.isModified() == "false":
            if exactMatchRecords.containsKey(authorName):
                authorMatchDocs = exactMatchRecords.get(authorName)
            else:
                authorMatchDocs = ArrayList()
                exactMatchRecords.put(authorName, authorMatchDocs)
            authorMatchDocs.add(doc)
        elif id not in linkedCitations:
            if map.containsKey(authorName):
                authorDocs = map.get(authorName)
            else:
                authorDocs = ArrayList()
                map.put(authorName, authorDocs)
            authorDocs.add(doc)
    # Cache the result set's max score, never letting it drop below 1.0
    self.__maxScore = max(1.0, float(result.get("response/maxScore")))
    if idList:
        self.__isLinked(idList, map)
    # Do not auto save if record is live
    if self.__workflowMetadata.get("modified") == "false":
        self.__autoSaveExactRecord(exactMatchRecords)
    return map