def __activate__(self, context):
    """Entry point: run the configured processing stream sets.

    Reads processConfig.json from the Fascinator 'process' directory and
    executes each configured set via execProcSet (or only the set named
    by the 'processingSet' request parameter, when one is supplied).
    Leaves a human-readable status in self.procMsg.
    """
    self.log = context["log"]
    self.request = context["request"]
    self.sessionState = context["sessionState"]
    # Script runs unauthenticated; impersonate admin for the duration.
    self.sessionState.set("username", "admin")
    processingSet = self.request.getParameter("processingSet")
    self.procMsg = None
    # read configuration and trigger processing stream sets
    # storing the return object on the map
    configFilePath = FascinatorHome.getPath(
        "process") + "/processConfig.json"
    procConfigFile = File(configFilePath)
    if procConfigFile.exists() == True:
        self.dataMap = HashMap()
        self.dataMap.put("indexer", context['Services'].getIndexer())
        self.procConfigJson = JsonSimple(procConfigFile)
        for configObj in self.procConfigJson.getJsonArray():
            configJson = JsonSimple(configObj)
            procId = configJson.getString("", "id")
            if processingSet is not None:
                # A specific set was requested: run only that one.
                if procId == processingSet:
                    self.execProcSet(procId, configJson)
            else:
                # No filter supplied: run every configured set.
                self.execProcSet(procId, configJson)
        if self.procMsg is None:
            self.procMsg = "Processing complete!"
    else:
        self.procMsg = "Configuration file does not exist: " + configFilePath
def __activate__(self, context):
    """Proxy a request to a whitelisted external URL.

    The 'ns' form parameter selects a base URL from the 'proxy-urls' map
    in system config; 'qs' supplies the query string to forward. The
    fetched body (or a JSON error object on failure) is left in the
    local 'data' for use further down the script.
    """
    self.velocityContext = context
    formData = self.vc("formData")
    # build the URL and query parameters to retrieve
    proxyUrls = JsonSimple(self.vc("systemConfig").getObject("proxy-urls"))
    url = ""
    key = formData.get("ns", "")
    # Only keys present in the configured map may be proxied.
    if proxyUrls.getJsonObject().containsKey(key):
        url = proxyUrls.getString("", [key])
    queryStr = formData.get("qs")
    # An un-substituted OpenSearch template means "no query"; drop it.
    if queryStr == "searchTerms={searchTerms}":
        queryStr = None
    if queryStr:
        if formData.get("jaffa2autocomplete", "false") == "true":
            # NOTE(review): this variant lower-cases autocomplete terms;
            # an otherwise-identical script elsewhere in the file does not.
            url += "?searchTerms=%s" % queryStr.lower()
        else:
            url += "?%s" % queryStr
    self.vc("log").debug("Proxy URL = '{}'", url)
    data = None
    try:
        data = self.__wget(url)
    except Exception, e:
        data = '{"error":"%s"}' % str(e)
        self.vc("log").error("ERROR accessing URL:", e)
def __activate__(self, context):
    """Proxy a request to a whitelisted external URL.

    The 'ns' form parameter selects a base URL from the 'proxy-urls' map
    in system config; 'qs' supplies the query string to forward. The
    fetched body (or a JSON error object on failure) is left in the
    local 'data' for use further down the script.
    """
    self.velocityContext = context
    formData = self.vc("formData")
    # build the URL and query parameters to retrieve
    proxyUrls = JsonSimple(self.vc("systemConfig").getObject("proxy-urls"))
    url = ""
    key = formData.get("ns", "")
    # Only keys present in the configured map may be proxied.
    if proxyUrls.getJsonObject().containsKey(key):
        url = proxyUrls.getString("", [key])
    queryStr = formData.get("qs")
    # An un-substituted OpenSearch template means "no query"; drop it.
    if queryStr == "searchTerms={searchTerms}":
        queryStr = None
    if queryStr:
        if formData.get("jaffa2autocomplete", "false") == "true":
            # Autocomplete terms are forwarded verbatim in this variant.
            url += "?searchTerms=%s" % queryStr
        else:
            url += "?%s" % queryStr
    self.vc("log").debug("Proxy URL = '{}'", url)
    data = None
    try:
        data = self.__wget(url)
    except Exception, e:
        data = '{"error":"%s"}' % str(e)
        self.vc("log").error("ERROR accessing URL:", e)
def __checkMetadataPayload(self): try: # Simple check for its existance self.object.getPayload("formData.tfpackage") self.firstHarvest = False except Exception: self.firstHarvest = True # We need to create it self.log.info("Creating 'formData.tfpackage' payload for object '{}'", self.oid) # Prep data data = { "viewId": "default", "workflow_source": "Edgar Import", "packageType": "dataset", "redbox:formVersion": self.redboxVersion, "redbox:newForm": "true" } package = JsonSimple(JsonObject(data)) # Store it inStream = IOUtils.toInputStream(package.toString(True), "UTF-8") try: self.object.createStoredPayload("formData.tfpackage", inStream) self.packagePid = "formData.tfpackage" except StorageException, e: self.log.error("Error creating 'formData.tfpackage' payload for object '{}'", self.oid, e) raise Exception("Error creating package payload: ", e)
def __messages(self):
    """Broadcast this object's OID to every configured message target."""
    targets = self.message_list
    if targets is None or len(targets) == 0:
        # Nothing to notify.
        return
    envelope = JsonSimple()
    envelope.getJsonObject().put("oid", self.oid)
    body = envelope.toString()
    for destination in targets:
        self.utils.sendMessage(destination, body)
def updateLocalRecordRelations(self, jobItems):
    """Update relationship curation info on local (redbox) records.

    Builds an OID -> curated identifier map from the job items, then for
    each item whose type is curated by redbox, resolves each
    externally-held relationship back to a local OID and marks it
    curated in the record's package payload before writing it back.
    """
    oidIdentifierMap = HashMap()
    for jobItem in jobItems:
        oidIdentifierMap.put(jobItem.get("oid"), jobItem.get("required_identifiers")[0].get("identifier"))
    for jobItem in jobItems:
        type = jobItem.get("type")
        targetSystem = self.systemConfig.getString(None, "curation", "supported-types", type)
        if targetSystem == "redbox":
            oid = jobItem.get("oid")
            digitalObject = StorageUtils.getDigitalObject(self.services.getStorage(), oid)
            tfPackagePid = self.getPackageData(digitalObject)
            metadataJsonPayload = digitalObject.getPayload(tfPackagePid)
            metadataJsonInstream = metadataJsonPayload.open()
            metadataJson = JsonSimple(metadataJsonInstream)
            metadataJsonPayload.close()
            relationships = metadataJson.getArray("relationships")
            if relationships is not None:
                for relationship in relationships:
                    system = relationship.get("system")
                    # BUGFIX: the original condition was
                    #   system != "redbox" or system != None
                    # which is true for EVERY value of system; only
                    # relationships held by a different, known system
                    # should be resolved remotely.
                    if system != "redbox" and system is not None:
                        url = self.systemConfig.getString("can't find it", "curation", "external-system-urls", "get-oid-for-identifier", system)
                        client = BasicHttpClient(url + "&identifier=" + relationship.get("identifier"))
                        get = GetMethod(url + "&identifier=" + relationship.get("identifier"))
                        client.executeMethod(get)
                        if get.getStatusCode() == 200:
                            response = JsonSimple(get.getResponseBodyAsString())
                            relationship.put("curatedPid", oidIdentifierMap.get(response.getString(None, "oid")))
                            relationship.put("isCurated", True)
                            # Now update the relationship on Mint's side
                            break
            istream = ByteArrayInputStream(String(metadataJson.toString(True)).getBytes())
            StorageUtils.createOrUpdatePayload(digitalObject, tfPackagePid, istream)
def updateRelationships(self, relationship, pid, identifier):
    """Mark the relationship identified by `identifier` as curated.

    Looks up the related object by the relationship's identifier, then
    updates (or appends) the matching entry in that object's
    metadata.json 'relationships' array with the curated PID, and writes
    the payload back to storage.
    """
    oid = self.findOidByIdentifier(relationship.get("identifier"))
    self.writer.println(oid)
    digitalObject = StorageUtils.getDigitalObject(self.storage, oid)
    metadataJsonPayload = digitalObject.getPayload("metadata.json")
    metadataJsonInstream = metadataJsonPayload.open()
    metadataJson = JsonSimple(metadataJsonInstream)
    metadataJsonPayload.close()
    relationships = metadataJson.getArray("relationships")
    found = False
    if relationships is None:
        # No relationships yet: start a fresh array inside the JSON doc.
        relationships = JSONArray()
        metadataJson.getJsonObject().put("relationships", relationships)
    for relationship1 in relationships:
        if relationship1.get("identifier") == identifier:
            relationship1.put("isCurated", True)
            relationship1.put("curatedPid", pid)
            found = True
    if not found:
        # The reciprocal entry does not exist yet; create it.
        newRelationship = JsonObject()
        newRelationship.put("isCurated", True)
        newRelationship.put("curatedPid", pid)
        newRelationship.put("relationship", relationship.get("relationship"))
        newRelationship.put("identifier", identifier)
        relationships.add(newRelationship)
    istream = ByteArrayInputStream(String(metadataJson.toString(True)).getBytes())
    StorageUtils.createOrUpdatePayload(digitalObject, "metadata.json", istream)
def __activate__(self, context):
    """Run the processing stream sets defined in processConfig.json.

    When the request supplies a 'processingSet' parameter only that set
    is executed; otherwise every configured set runs. A status message
    is left in self.procMsg.
    """
    self.log = context["log"]
    self.request = context["request"]
    self.sessionState = context["sessionState"]
    self.sessionState.set("username", "admin")
    requestedSet = self.request.getParameter("processingSet")
    self.procMsg = None
    # read configuration and trigger processing stream sets
    # storing the return object on the map
    configFilePath = "%s/processConfig.json" % FascinatorHome.getPath("process")
    procConfigFile = File(configFilePath)
    if not procConfigFile.exists():
        self.procMsg = "Configuration file does not exist: " + configFilePath
        return
    self.dataMap = HashMap()
    self.dataMap.put("indexer", context['Services'].getIndexer())
    self.procConfigJson = JsonSimple(procConfigFile)
    for configObj in self.procConfigJson.getJsonArray():
        configJson = JsonSimple(configObj)
        procId = configJson.getString("", "id")
        # Run every set, or just the requested one when a filter is set.
        if requestedSet is None or procId == requestedSet:
            self.execProcSet(procId, configJson)
    if self.procMsg is None:
        self.procMsg = "Processing complete!"
def __activate__(self, context):
    """Check TIM for pending items and email a notification if any exist.

    Queries the configured notification URL; when the Solr-style
    response reports one or more records, sends an HTML email using the
    'tim.notification.email' settings. In test mode the message is
    diverted to a redirect address with the real recipient noted in
    the body.
    """
    self.log = context["log"]
    self.request = context["request"]
    self.sessionState = context["sessionState"]
    self.setting = JsonSimple(context["systemConfig"].getObject("tim.notification"))
    self.sessionState.set("username", "admin")
    # read configuration and trigger processing stream sets
    # storing the return object on the map
    self.dataMap = HashMap()
    self.dataMap.put("indexer", context['Services'].getIndexer())
    url = self.setting.getString("", "url")
    data = self.__wget(url)
    json = JsonSimple(data)
    # Only notify when the query found at least one record.
    if json.getInteger(0, ["response", "numFound"]) > 0:
        username = self.setting.getString("", ["email", "username"])
        password = self.setting.getString("", ["email", "password"])
        body = self.setting.getString("", ["email", "body"])
        to = self.setting.getString("", ["email", "to"])
        if self.setting.getString("", ["email", "testmode"]) == "true":
            # Test mode: note the real recipient and divert the message.
            body = body + "<p>TESTMODE: Was sent to " + to
            to = self.setting.getString("", ["email", "redirect"])
        email = HtmlEmail()
        email.setAuthenticator(DefaultAuthenticator(username, password))
        email.setHostName(self.setting.getString("localhost", ["email", "host"]))
        email.setSmtpPort(self.setting.getInteger(25, ["email", "port"]))
        email.setSSL(self.setting.getBoolean(False, ["email", "ssl"]))
        email.setTLS(self.setting.getBoolean(False, ["email", "tls"]))
        email.setFrom(self.setting.getString("", ["email", "from"]))
        email.setSubject(self.setting.getString("Action Required in TIM", ["email", "subject"]))
        email.addTo(to)
        email.setHtmlMsg(body)
        email.send()
def parseFFmpeg(self, parent):
    """Load and parse the 'ffmpeg.info' payload from parent's object.

    On success stores the raw JSON in self.__ffmpegRaw, the parsed
    document in self.__ffmpegData and the 'outputs' map in
    self.__ffmpegOutputs, then returns True. Returns False when the
    parent/object is missing or the payload cannot be read or parsed.
    """
    # BUGFIX: the original returned None (implicitly) when parent or its
    # object was missing, but False on other failures; normalise to False.
    if parent is None:
        return False
    object = parent.getObject()
    if object is None:
        return False
    payload = None
    try:
        payload = object.getPayload("ffmpeg.info")
        # Stream the content out to string
        out = ByteArrayOutputStream()
        IOUtils.copy(payload.open(), out)
        # BUGFIX: the original closed the payload twice on this path.
        payload.close()
        self.__ffmpegRaw = out.toString("UTF-8")
        out.close()
        # And parse it. JsonSimple() raises on bad input rather than
        # returning None, so the original dead 'is None' check is gone.
        self.__ffmpegData = JsonSimple(self.__ffmpegRaw)
        self.__ffmpegOutputs = self.__ffmpegData.getJsonSimpleMap(["outputs"])
        return True
    except:
        # Best-effort cleanup when anything above failed.
        if payload is not None:
            payload.close()
        return False
def updateObjectMetadata(self, objectMetaData, toWorkflowId):
    """Point the object's metadata at the target workflow's config and rules.

    Ensures the harvest-config and indexer-rules files for
    `toWorkflowId` are registered in storage, then records their
    PIDs/OIDs plus a transition timestamp on `objectMetaData`.
    """
    packageType, jsonConfigFile = self.__getPackageTypeAndJsonConfigFile(toWorkflowId)
    workflowsDir = FascinatorHome.getPathFile("harvest/workflows")

    # Harvest config: register the file in storage if not yet present.
    configFile = File(workflowsDir, jsonConfigFile)
    configObject = StorageUtils.checkHarvestFile(self.storage, configFile)
    if configObject is None:
        configObject = StorageUtils.getDigitalObject(self.storage, StorageUtils.generateOid(configFile))
    objectMetaData.setProperty("jsonConfigPid", jsonConfigFile)
    objectMetaData.setProperty("jsonConfigOid", configObject.getId())

    # The config names the indexer rules script; register that too.
    configJson = JsonSimple(configFile)
    rulesFileName = configJson.getString(None, "indexer", "script", "rules")
    rulesFile = File(workflowsDir, rulesFileName)
    rulesObject = StorageUtils.checkHarvestFile(self.storage, rulesFile)
    if rulesObject is None:
        rulesObject = StorageUtils.getDigitalObject(self.storage, StorageUtils.generateOid(rulesFile))
    objectMetaData.setProperty("rulesPid", rulesFileName)
    objectMetaData.setProperty("rulesOid", rulesObject.getId())

    # Record the transition itself.
    objectMetaData.setProperty("workflowTransitioned", "true")
    objectMetaData.setProperty(
        "date_transitioned",
        time.strftime("%Y-%m-%dT%H:%M:%SZ", time.localtime()))
def __checkMetadataPayload(self):
    """Check for the 'formData.tfpackage' payload, creating it if absent.

    Sets self.firstHarvest; on first harvest a skeleton package payload
    is created and self.packagePid recorded. Raises if the payload
    cannot be stored.
    """
    try:
        # Simple check for its existance
        self.object.getPayload("formData.tfpackage")
        self.firstHarvest = False
    except Exception:
        self.firstHarvest = True
        # We need to create it
        self.log.info(
            "Creating 'formData.tfpackage' payload for object '{}'",
            self.oid)
        # Prep data
        data = {
            "viewId": "default",
            "workflow_source": "Edgar Import",
            "packageType": "dataset",
            "redbox:formVersion": self.redboxVersion,
            "redbox:newForm": "true"
        }
        package = JsonSimple(JsonObject(data))
        # Store it
        inStream = IOUtils.toInputStream(package.toString(True), "UTF-8")
        try:
            self.object.createStoredPayload("formData.tfpackage", inStream)
            self.packagePid = "formData.tfpackage"
        except StorageException, e:
            self.log.error(
                "Error creating 'formData.tfpackage' payload for object '{}'",
                self.oid, e)
            raise Exception("Error creating package payload: ", e)
def __formData(self):
    """Locate, version-check and (if needed) upgrade the object's form data.

    Finds the workflow package payload by suffix, parses it, compares
    its 'redbox:formVersion' against the current version, and runs
    either a full upgrade or the v1.2 'dc:type' hotfix. The original
    data is backed up before any modified payload is written back.
    """
    # Find our workflow form data
    packagePid = None
    try:
        self.pidList = self.object.getPayloadIdList()
        for pid in self.pidList:
            if pid.endswith(self.packagePidSuffix):
                packagePid = pid
    except StorageException:
        self.log.error("Error accessing object PID list for object '{}' ",
                       self.oid)
        return
    if packagePid is None:
        self.log.debug("Object '{}' has no form data", self.oid)
        return
    # Retrieve our form data
    workflowData = None
    try:
        payload = self.object.getPayload(packagePid)
        try:
            workflowData = JsonSimple(payload.open())
        except Exception:
            self.log.error("Error parsing JSON '{}'", packagePid)
        finally:
            payload.close()
    except StorageException:
        self.log.error("Error accessing '{}'", packagePid)
        return
    # Test our version data
    self.version = workflowData.getString("{NO VERSION}",
                                          ["redbox:formVersion"])
    # Snapshot the original for backup before any modification.
    oldData = String(workflowData.toString(True))
    if self.version != self.redboxVersion:
        self.log.info("OID '{}' requires an upgrade: '{}' => '{}'",
                      [self.oid, self.version, self.redboxVersion])
        # The version data is old, run our upgrade
        # function to see if any alterations are
        # required. Most likely at least the
        # version number will change.
        newWorkflowData = self.__upgrade(workflowData)
    else:
        # Current version: only the dc:type hotfix may still apply.
        newWorkflowData = self.__hotfix(workflowData)
        if newWorkflowData is not None:
            self.log.debug("OID '{}' was hotfixed for v1.2 'dc:type' bug",
                           self.oid)
        else:
            self.log.debug("OID '{}' requires no work, skipping", self.oid)
            return
    # Backup our data first
    backedUp = self.__backup(oldData)
    if not backedUp:
        self.log.error("Upgrade aborted, data backup failed!")
        return
    # Save the newly modified data
    jsonString = String(newWorkflowData.toString(True))
    inStream = ByteArrayInputStream(jsonString.getBytes("UTF-8"))
    try:
        self.object.updatePayload(packagePid, inStream)
    except StorageException, e:
        self.log.error("Error updating workflow payload: ", e)
def __activate__(self, context):
    """Copy a record's tfpackage from one object to another.

    Requires admin or record-owner access. Copies 'fromOid''s package
    payload onto 'toOid', optionally records the related OID back on the
    source package, stamps a property on the target's TF-OBJ-META, and
    reharvests. Writes a JSON status object to the response.
    """
    self.auth = context["page"].authentication
    self.errorMsg = ""
    self.request = context["request"]
    self.response = context["response"]
    self.formData = context["formData"]
    self.storage = context["Services"].getStorage()
    self.log = context["log"]
    self.reportManager = context["Services"].getService("reportManager")
    fromOid = self.formData.get("fromOid")
    fromObject = self.storage.getObject(fromOid)
    # Access control: admin, or owner of the source object.
    if (self.auth.is_logged_in()):
        if (self.auth.is_admin() == True):
            pass
        elif (self.__isOwner(fromObject)):
            pass
        else:
            self.errorMsg = "Requires Admin / Librarian / Reviewer / owner access."
    else:
        self.errorMsg = "Please login."
    if self.errorMsg == "":
        toOid = self.formData.get("toOid")
        toObject = self.storage.getObject(toOid)
        storeRelatedData = self.formData.get("relatedData")
        fromTFPackage = self._getTFPackage(fromObject)
        toTFPackage = self._getTFPackage(toObject)
        # Overwrite the target's package with the source's content.
        fromInputStream = fromTFPackage.open()
        try:
            StorageUtils.createOrUpdatePayload(toObject,
                                               toTFPackage.getId(),
                                               fromInputStream)
        except StorageException:
            print "error setting tfPackage"
        fromTFPackage.close()
        fromTFPackageJson = JsonSimple(fromTFPackage.open()).getJsonObject()
        if storeRelatedData != "false":
            # add relatedOid info
            fromTFPackageJson = self._addRelatedOid(
                JsonSimple(fromTFPackage.open()), toOid)
        # Write the (possibly annotated) source package back to itself.
        inStream = IOUtils.toInputStream(fromTFPackageJson.toJSONString(),
                                         "UTF-8")
        try:
            StorageUtils.createOrUpdatePayload(fromObject,
                                               fromTFPackage.getId(),
                                               inStream)
        except StorageException:
            print "error setting tfPackage"
        tfMetaPropertyValue = self.formData.get("tfMetaPropertyValue")
        self._addPropertyValueToTFMeta(toObject, tfMetaPropertyValue)
        self._reharvestPackage()
        result = '{"status": "ok", "url": "%s/workflow/%s", "oid": "%s" }' % (context["portalPath"], toOid, toOid)
    else:
        result = '{"status": "err", "message": "%s"}' % self.errorMsg
    writer = self.response.getPrintWriter("application/json; charset=UTF-8")
    writer.println(result)
    writer.close()
def __activate__(self, context):
    """Run an NLA search and write the JSON (or plain-text) result.

    Builds an empty response skeleton with 'metadata' and 'results'
    slots for searchNla() to fill, then serialises it to the response.
    On failure a 500 status and a JSON error object are emitted instead.
    """
    self.request = context["request"]
    self.response = context["response"]
    self.formData = context["formData"]
    self.log = context["log"]
    # Basic response text
    message = JsonSimple()
    self.metadata = message.writeObject(["metadata"])
    self.results = message.writeArray(["results"])
    # Prepare response Object
    format = self.formData.get("format")
    if format == "json":
        out = self.response.getPrintWriter("application/json; charset=UTF-8")
    else:
        out = self.response.getPrintWriter("text/plain; charset=UTF-8")
    # Success Response
    try:
        # searchNla() populates self.metadata / self.results in place.
        self.searchNla()
        out.println(message.toString(True))
        out.close()
    except Exception, ex:
        self.log.error("Error during search: ", ex)
        self.response.setStatus(500)
        message = JsonSimple()
        message.getJsonObject().put("error", ex.getMessage())
        out.println(message.toString(True))
        out.close()
def __activate__(self, context):
    """Poll in-progress curation jobs and act on any that completed.

    Reads the cached in-progress job list, queries each job's status,
    caches the status response, and when a job is complete publishes its
    records and updates local relationship data.
    """
    # BUGFIX: the original read 'self.None = context["log"]', which is a
    # syntax error in Python ('None' cannot be an attribute name).
    self.log = context["log"]
    self.systemConfig = context["systemConfig"]
    self.sessionState = context["sessionState"]
    self.response = context["response"]
    self.request = context["request"]
    self.services = context["Services"]
    self.sessionState.set("username", "admin")
    self.writer = self.response.getPrintWriter("text/plain; charset=UTF-8")
    curationJobDao = ApplicationContextProvider.getApplicationContext().getBean("curationJobDao")
    publicationHandler = ApplicationContextProvider.getApplicationContext().getBean("publicationHandler")
    jobs = JsonSimple(File(FascinatorHome.getPath() + "/curation-status-responses/inProgressJobs.json")).getArray("inProgressJobs")
    self.writer.println(jobs.size())
    for curationJob in jobs:
        # (The original had a stray no-op 'curationJob' expression here.)
        jobStatus = self.queryJobStatus(curationJob)
        self.writer.println(jobStatus.toString())
        status = jobStatus.getString("failed", "status")
        # Cache the latest status response keyed by job id.
        self.writeResponseToStatusResponseCache(jobStatus.getInteger(None, "job_id"), jobStatus)
        self.writer.println(status)
        if "complete" == status:
            publicationHandler.publishRecords(jobStatus.getArray("job_items"))
            self.updateLocalRecordRelations(jobStatus.getArray("job_items"))
    self.writer.close()
    self.sessionState.remove("username")
def __hotfix(self, formData):
    """Repair the v1.2 'dc:type' migration bug in a form data package.

    Restores 'dc:type.rdf:PlainLiteral' from the v1.4 backup payload
    when it disagrees with the legacy 'dc:type' value. Returns the fixed
    form data, or None when no fix is needed or the backup is
    unavailable.
    """
    legacyType = formData.getString(None, ["dc:type"])
    currentType = formData.getString(None, ["dc:type.rdf:PlainLiteral"])
    if currentType is not None and legacyType == currentType:
        ## No fix required
        return None
    self.log.debug("Bugged Type?: v1.4: '{}', OLD: '{}'",
                   currentType, legacyType)
    ## Get Backup data
    ## NOTE: The only known production system affected by this bug
    ## was caught during a v1.4 upgrade. Alter this line if required.
    backupPid = "1.4.workflow.backup"
    backupData = None
    try:
        payload = self.object.getPayload(backupPid)
        try:
            backupData = JsonSimple(payload.open())
        except Exception:
            self.log.error("Error parsing JSON '{}'", backupPid)
        finally:
            payload.close()
    except StorageException:
        self.log.error("Error accessing '{}'", backupPid)
        return None
    # Restore the pre-migration value into the new field.
    restoredType = backupData.getString(None, ["dc:type"])
    self.log.debug("Old Type: '{}' => 'dc:type.rdf:PlainLiteral'", restoredType)
    formData.getJsonObject().put("dc:type.rdf:PlainLiteral", restoredType)
    return formData
def getCurationData(self, oid): json = JsonObject() try: # Get the object from storage storage = self.Services.getStorage() object = storage.getObject(oid) # Find the package payload payload = object.getPayload("metadata.json") # Not found? if payload is None: self.log.error(" * detail.py => Can't find package data!") json.put("error", True) return json # Parse the data data = JsonSimple(payload.open()) payload.close() # Return it json.put("error", False) json.put("relationships", data.writeArray("relationships")) return json except StorageException, ex: self.log.error(" * detail.py => Storage Error accessing data: ", ex) json.put("error", True) return json
def __activate__(self, context):
    """Build an ePub for the object named by the 'oid' form parameter.

    Loads the object's package manifest from storage, gathers its
    digital items and assembles the ePub. On failure a 500 status and
    the error text are written to the response.
    """
    self.velocityContext = context
    oid = self.vc("formData").get("oid")
    print "--- Creating ePub for: %s ---" % oid
    try:
        # Reset per-request build state.
        self.__epubMimetypeStream = None
        self.__epubContainerStream = None
        self.__epubcss = None
        self.__orderedItem = []
        self.__itemRefDict = {}
        # get the package manifest
        object = Services.getStorage().getObject(oid)
        sourceId = object.getSourceId()
        payload = object.getPayload(sourceId)
        self.__manifest = JsonSimple(payload.open())
        payload.close()
        object.close()
        # create the epub
        self.__getDigitalItems(
            self.__manifest.getJsonSimpleMap("manifest"))
        self.__createEpub()
    except Exception, e:
        log.error("Failed to create epub", e)
        self.vc("response").setStatus(500)
        writer = self.vc("response").getPrintWriter(
            "text/plain; charset=UTF-8")
        writer.println(str(e))
        writer.close()
def updateObjectMetadata(self, objectMetaData, toWorkflowId):
    """Record workflow-transition config/rules references on object metadata.

    Ensures the target workflow's harvest config and indexer rules files
    are registered in storage, then stores their PIDs/OIDs and a
    'date_transitioned' timestamp on `objectMetaData`.
    """
    packageType, jsonConfigFile = self.__getPackageTypeAndJsonConfigFile(toWorkflowId)
    workflowsDir = FascinatorHome.getPathFile("harvest/workflows")
    configFile = File(workflowsDir, jsonConfigFile)
    # Register the config file in storage on first use.
    configObject = StorageUtils.checkHarvestFile(self.storage, configFile);
    if configObject is None:
        oid = StorageUtils.generateOid(configFile);
        configObject = StorageUtils.getDigitalObject(self.storage, oid);
    objectMetaData.setProperty("jsonConfigPid", jsonConfigFile)
    objectMetaData.setProperty("jsonConfigOid", configObject.getId())
    # The config names the indexer rules script; register that too.
    configJson = JsonSimple(configFile)
    rulesFileName = configJson.getString(None, "indexer", "script", "rules")
    rulesFile = File(workflowsDir, rulesFileName)
    rulesObject = StorageUtils.checkHarvestFile(self.storage, rulesFile);
    if rulesObject is None:
        oid = StorageUtils.generateOid(rulesFile);
        rulesObject = StorageUtils.getDigitalObject(self.storage, oid);
    objectMetaData.setProperty("rulesPid", rulesFileName)
    objectMetaData.setProperty("rulesOid", rulesObject.getId())
    # Record the transition itself.
    objectMetaData.setProperty("workflowTransitioned", "true")
    objectMetaData.setProperty("date_transitioned", time.strftime("%Y-%m-%dT%H:%M:%SZ", time.localtime()))
def __activate__(self, context):
    """Record a committee member's approval response against an object.

    Reads 'oid', 'approval' and 'approval_comment' request parameters,
    merges the current user's response into the object's
    'committee-responses.metadata' payload (creating it if absent), then
    redirects back to the detail page.
    """
    request = context["request"]
    storage = context["Services"].getStorage()
    auth = context["page"].authentication
    log = context["log"]
    username = auth.get_name()
    oid = request.getParameter("oid")
    approval = request.getParameter("approval")
    approval_comment = request.getParameter("approval_comment")
    storedObj = storage.getObject(oid)
    committeeResponses = None
    payloadList = storedObj.getPayloadIdList()
    if payloadList.contains("committee-responses.metadata"):
        # Existing responses: load and merge into them.
        committeeResponsePayload = storedObj.getPayload("committee-responses.metadata")
        committeeResponses = JsonSimple(committeeResponsePayload.open()).getJsonObject()
    else:
        committeeResponses = JsonObject()
    # One response object per user; re-submitting overwrites the old one.
    committeeResponse = JsonObject()
    committeeResponse.put("approval", approval)
    committeeResponse.put("approval_comment", approval_comment)
    committeeResponses.put(username, committeeResponse)
    log.debug(" %s: Committee %s, approval = %s, comment = %s" % (
        oid, username, approval, approval_comment))
    StorageUtils.createOrUpdatePayload(
        storedObj, "committee-responses.metadata",
        IOUtils.toInputStream(committeeResponses.toString(), "UTF-8"))
    context["response"].sendRedirect(context["portalPath"] + "/detail/" + oid)
def get_image(self):
    """Return image tag annotations with media-fragment positions as JSON.

    Searches Solr for tag annotations, keeps only those whose locator is
    a W3C media-fragment ('#xywh=left,top,width,height'), and returns a
    JSON array string of objects carrying position, creator, id and
    text for rendering over the image.
    """
    self.type = "http://www.purl.org/anotar/ns/type/0.1#Tag"
    mediaFragType = "http://www.w3.org/TR/2009/WD-media-frags-20091217"
    result = '{"result":' + self.search_solr() + "}"
    if result:
        imageTagList = []
        imageTags = JsonSimple(result).getJsonSimpleList(["result"])
        for imageTag in imageTags:
            imageAno = JsonSimple()
            # We only want tags with locators, not basic tags
            locators = imageTag.getJsonSimpleList(["annotates", "locators"])
            if locators and not locators.isEmpty():
                locatorValue = locators.get(0).getString(None, ["value"])
                locatorType = locators.get(0).get(None, ["type"])
                if locatorValue and locatorValue.find("#xywh=") > -1 and locatorType == mediaFragType:
                    # Split off the geometry: "...#xywh=l,t,w,h".
                    _, locatorValue = locatorValue.split("#xywh=")
                    left, top, width, height = locatorValue.split(",")
                    object = imageAno.getJsonObject()
                    object.put("top", top)
                    object.put("left", left)
                    object.put("width", width)
                    object.put("height", height)
                    object.put("creator", imageTag.getString(None, ["creator", "literal"]))
                    object.put("creatorUri", imageTag.getString(None, ["creator", "uri"]))
                    object.put("id", imageTag.getString(None, ["id"]))
                    # tagCount = imageTag.getString(None, ["tagCount"])
                    object.put("text", imageTag.getString(None, ["content", "literal"]))
                    object.put("editable", "true")
                    imageTagList.append(imageAno.toString())
        result = "[" + ",".join(imageTagList) + "]"
    return result
def __hotfix(self, formData):
    """Repair the v1.2 'dc:type' migration bug in a form data package.

    Restores 'dc:type.rdf:PlainLiteral' from the v1.4 backup payload
    when it disagrees with the legacy 'dc:type' value. Returns the fixed
    form data, or None when no fix is needed or the backup is
    unavailable.
    """
    oldType = formData.getString(None, ["dc:type"])
    newType = formData.getString(None, ["dc:type.rdf:PlainLiteral"])
    if oldType != newType or newType is None:
        self.log.debug("Bugged Type?: v1.4: '{}', OLD: '{}'",
                       newType, oldType)
    else:
        ## No fix required
        return None
    ## Get Backup data
    ## NOTE: The only known production system affected by this bug
    ## was caught during a v1.4 upgrade. Alter this line if required.
    pid = "1.4.workflow.backup"
    oldData = None
    try:
        payload = self.object.getPayload(pid)
        try:
            oldData = JsonSimple(payload.open())
        except Exception:
            self.log.error("Error parsing JSON '{}'", pid)
        finally:
            payload.close()
    except StorageException:
        self.log.error("Error accessing '{}'", pid)
        return None
    # Restore the pre-migration value into the new field.
    oldType = oldData.getString(None, ["dc:type"])
    self.log.debug("Old Type: '{}' => 'dc:type.rdf:PlainLiteral'", oldType)
    formData.getJsonObject().put("dc:type.rdf:PlainLiteral", oldType);
    return formData
def __activate__(self, context):
    """Create or delete a portal view via the 'func' form parameter.

    'create-view' builds a new Portal from the current session's facet
    query; 'delete-view' (admin only) removes a non-default portal.
    Writes a JSON status object ('status', 'message', optionally 'url')
    to the response.
    """
    response = context["response"]
    writer = response.getPrintWriter("text/plain; charset=UTF-8")
    auth = context["page"].authentication
    result = JsonSimple()
    obj = result.getJsonObject()
    # Default to an error response; overwritten on success below.
    obj.put("status", "error")
    obj.put("message", "An unknown error has occurred")
    if auth.is_logged_in():
        services = context["Services"]
        formData = context["formData"]
        sessionState = context["sessionState"]
        urlBase = context["urlBase"]
        if urlBase.endswith("/"):
            urlBase = urlBase[:-1]
        func = formData.get("func")
        portalManager = services.portalManager
        if func == "create-view":
            try:
                # Use the session's facet selections (minus the generic
                # object-type facet) as the new view's query.
                fq = [q for q in sessionState.get("fq") if q != 'item_type:"object"']
                id = formData.get("id")
                description = formData.get("description")
                print "Creating view '%s': '%s'" % (id, description)
                portal = Portal(id)
                portal.setDescription(formData.get("description"))
                portal.setQuery(" OR ".join(fq))
                portal.setSearchQuery(sessionState.get("searchQuery"))
                portal.setFacetFields(portalManager.default.facetFields)
                portalManager.add(portal)
                portalManager.save(portal)
                obj.put("status", "ok")
                obj.put("message", "View '%s' successfully created" % id)
                obj.put("url", "%s/%s/home" % (urlBase, id))
            except Exception, e:
                response.setStatus(500)
                obj.put("message", str(e))
        elif func == "delete-view":
            defaultPortal = context["defaultPortal"]
            portalId = formData.get("view")
            if auth.is_admin():
                if not portalId:
                    response.setStatus(500)
                    obj.put("message", "No view specified to be deleted")
                elif portalId != defaultPortal:
                    # sanity check: don't delete default portal
                    print "Deleting view '%s'" % portalId
                    try:
                        portalManager.remove(portalId)
                        obj.put("status", "ok")
                        obj.put("message", "View '%s' successfully removed" % portalId)
                        obj.put("url", "%s/%s/home" % (urlBase, defaultPortal))
                    except Exception, e:
                        obj.put("message", str(e))
                else:
                    response.setStatus(500)
                    obj.put("message", "The default view cannot be deleted")
            else:
                response.setStatus(403)
                obj.put("message", "Only administrative users can access this API")
def get_file_list_as_json(self, rootPath, pattern):
    """Walk rootPath and map each matching file's full path to its name.

    Returns a JsonObject of {full path: filename} for every file under
    rootPath whose name matches the glob `pattern`.
    """
    listing = JsonSimple().getJsonObject()
    for directory, _subdirs, names in os.walk(rootPath):
        for name in fnmatch.filter(names, pattern):
            fullPath = os.path.join(directory, name)
            self.log.debug("appending %s to file list" % fullPath)
            listing.put(fullPath, name)
    return listing
def __getPackageTypes(self):
    """Return the configured package types, falling back to a default.

    Reads portal/packageTypes from system config; when none are defined
    a single 'default' entry pointing at packaging-config.json is
    supplied.
    """
    configured = self.sysConfig.getObject(["portal", "packageTypes"])
    types = JsonSimple.toJavaMap(configured)
    if not types.isEmpty():
        return types
    # Nothing configured: fall back to the standard packaging config.
    fallback = JsonObject()
    fallback.put("jsonconfig", "packaging-config.json")
    types.put("default", JsonSimple(fallback))
    return types
def getLabel(self, jsonFile, key):
    """Translate a metadata value into its display label.

    Looks up self.metadata[key] among the entries of the JSON array file
    at <fascinator.home><jsonFile>; returns the matching entry's 'label',
    or None when no entry carries that value.
    """
    target = self.metadata.get(key)
    lookupPath = System.getProperty("fascinator.home") + jsonFile
    for element in JsonSimple(File(lookupPath)).getJsonArray():
        candidate = JsonSimple(element)
        if candidate.getString("", "value") == target:
            return candidate.getString("", "label")
    return None
def updatePackageType(self, tfPackage, toWorkflowId): tfPackageJson = JsonSimple(tfPackage.open()).getJsonObject() tfPackageJson.put("packageType", toWorkflowId) inStream = IOUtils.toInputStream(tfPackageJson.toString(), "UTF-8") try: StorageUtils.createOrUpdatePayload(self.object, tfPackage.getId(), inStream) except StorageException: print " ERROR updating dataset payload"
def __activate__(self, context):
    """Mark a relationship on a record as curated via request parameters.

    Locates the target object by 'oid' (or by 'identifier' when no OID
    is given), updates the matching entry in its metadata.json
    'relationships' array with the curated PID — appending a new entry
    if none matches — writes the payload back, and echoes the resulting
    JSON to the response.
    """
    try:
        self.log = context["log"]
        self.response = context["response"]
        self.request = context["request"]
        self.systemConfig = context["systemConfig"]
        self.storage = context["Services"].getStorage()
        self.indexer = context["Services"].getIndexer()
        self.sessionState = context["sessionState"]
        # Impersonate admin while this script runs (undone in finally).
        self.sessionState.set("username", "admin")
        out = self.response.getPrintWriter("text/plain; charset=UTF-8")
        relationshipMapper = ApplicationContextProvider.getApplicationContext().getBean("relationshipMapper")
        externalCurationMessageBuilder = ApplicationContextProvider.getApplicationContext().getBean("externalCurationMessageBuilder")
        oid = self.request.getParameter("oid")
        if oid is None:
            # No OID given; resolve it from the identifier instead.
            identifier = self.request.getParameter("identifier")
            oid = self.findOidByIdentifier(identifier)
        relationshipType = self.request.getParameter("relationship")
        curatedPid = self.request.getParameter("curatedPid")
        sourceId = self.request.getParameter("sourceIdentifier")
        system = self.request.getParameter("system")
        digitalObject = StorageUtils.getDigitalObject(self.storage, oid)
        metadataJsonPayload = digitalObject.getPayload("metadata.json")
        metadataJsonInstream = metadataJsonPayload.open()
        metadataJson = JsonSimple(metadataJsonInstream)
        metadataJsonPayload.close()
        relationships = metadataJson.getArray("relationships")
        found = False
        for relationship in relationships:
            if relationship.get("identifier") == sourceId:
                relationship.put("isCurated", True)
                relationship.put("curatedPid", curatedPid)
                found = True
        if not found:
            # No matching entry: append a new curated relationship.
            relationship = JsonObject()
            relationship.put("isCurated", True)
            relationship.put("curatedPid", curatedPid)
            relationship.put("relationship", relationshipType)
            relationship.put("identifier", sourceId)
            relationship.put("system", system)
            relationships.add(relationship)
        out.println(metadataJson.toString(True))
        istream = ByteArrayInputStream(String(metadataJson.toString(True)).getBytes())
        StorageUtils.createOrUpdatePayload(digitalObject, "metadata.json", istream)
        out.close()
    finally:
        # Always drop the temporary admin impersonation.
        self.sessionState.remove("username")
def __createNew(self):
    """Create a new package manifest and harvest it as an object.

    Builds a .tfpackage manifest from form data (including any
    sequence-generated metadata fields), writes it into the packages
    directory, then runs the packaging harvest workflow over it.
    Returns a JSON failure-status string when the harvest fails.
    """
    self.vc("log").debug("Creating a new package...")
    packageType, jsonConfigFile = self.__getPackageTypeAndJsonConfigFile()
    self.vc("log").debug("packageType = '{}'", packageType)
    self.vc("log").debug("jsonConfigFile = '{}'", jsonConfigFile)
    manifestHash = "%s.tfpackage" % uuid.uuid4()
    # store the manifest file for harvesting
    packageDir = FascinatorHome.getPathFile("packages")
    packageDir.mkdirs()
    manifestFile = File(packageDir, manifestHash)
    outStream = FileOutputStream(manifestFile)
    outWriter = OutputStreamWriter(outStream, "UTF-8")
    # Reset any active package so a fresh manifest is created.
    self.vc("sessionState").set("package/active", None)
    manifest = self.__getActiveManifest()
    manifest.setType(packageType)
    metaList = list(self.vc("formData").getValues("metaList"))
    jsonObj = manifest.getJsonObject()
    for metaName in metaList:
        value = self.vc("formData").get(metaName)
        jsonObj.put(metaName, value)
    if self.vc("formData").getValues("sequencesMetaList") != None:
        # Each entry names a sequence plus the metadata field and the
        # format string used to store the next sequence number.
        sequenceService = ApplicationContextProvider.getApplicationContext().getBean("sequenceService")
        sequencesMetaList = list(self.vc("formData").getValues("sequencesMetaList"))
        for sequenceInfo in sequencesMetaList:
            sequenceInfoJson = JsonSimple(sequenceInfo)
            sequenceIndex = sequenceService.getSequence(sequenceInfoJson.getString(None, "sequenceName"))
            jsonObj.put(sequenceInfoJson.getString(None, "metadataName"),
                        String.format(sequenceInfoJson.getString(None, "stringFormat"), sequenceIndex))
    self.vc("log").debug("json object created is: %r" % jsonObj)
    outWriter.write(manifest.toString(True))
    outWriter.close()
    # adding ability to set access plugin
    accessPlugin = self.vc("formData").get("access_plugin", "derby")
    if accessPlugin is not None:
        self.vc("page").authentication.set_access_plugin(accessPlugin)
    try:
        # harvest the package as an object
        username = self.vc("sessionState").get("username")
        if username is None:
            username = "******"  # necessary?
        harvester = None
        # set up config files, creating if necessary
        workflowsDir = FascinatorHome.getPathFile("harvest/workflows")
        configFile = self.__getFile(workflowsDir, jsonConfigFile)
        self.__getFile(workflowsDir, "packaging-rules.py")
        # run the harvest client with our packaging workflow config
        harvester = HarvestClient(configFile, manifestFile, username)
        harvester.start()
        manifestId = harvester.getUploadOid()
        harvester.shutdown()
    except Exception, ex:
        error = "Packager workflow failed: %s" % str(ex)
        self.vc("log").error(error, ex)
        # Make sure the harvester is stopped even after a failure.
        if harvester is not None:
            harvester.shutdown()
        return '{ "status": "failed" }'
def __activate__(self, context): self.None = context["log"] self.systemConfig = context["systemConfig"] self.sessionState = context["sessionState"] self.response = context["response"] self.request = context["request"] self.indexer = context["Services"].getIndexer() self.storage = context["Services"].getStorage() self.log = context["log"] self.sessionState.set("username", "admin") self.writer = self.response.getPrintWriter("text/plain; charset=UTF-8") publishedRecords = self.findPublishedRecords() for publishedRecord in publishedRecords: digitalObject = StorageUtils.getDigitalObject( self.storage, publishedRecord.getString(None, "storage_id")) tfPackage = self.getTfPackage(digitalObject) metadata = digitalObject.getMetadata() configObject = StorageUtils.getDigitalObject( self.storage, metadata.getProperty("jsonConfigOid")) payload = configObject.getPayload( metadata.getProperty("jsonConfigPid")) inStream = payload.open() jsonConfig = JsonSimple(inStream) payload.close() requiredIdentifiers = jsonConfig.getArray("curation", "requiredIdentifiers") if requiredIdentifiers is not None: pidName = self.systemConfig.getString(None, "curation", "identifier-pids", requiredIdentifiers[0]) pid = metadata.getProperty(pidName) identifier = tfPackage.getString(pid, "metadata", "dc.identifier") relationships = tfPackage.getArray("relationships") if relationships is not None: for relationship in relationships: self.writer.println(relationship) if relationship.get("broker") is None: if relationship.get( "system" ) is not None and relationship.get( "system") != self.systemConfig.getString( None, "system"): self.writer.println( "notifyExternalRelationship") self.notifyExternalRelationship( relationship, pid, relationship.get("system"), identifier) else: self.updateRelationships(relationship, pid) self.writer.close() self.sessionState.remove("username")
def updateWorkFlowMetadata(self, workflowMetadata, toWorkflowId, toWorkflowStage, pageTitle, label): workflowMetaDataJson = JsonSimple(workflowMetadata.open()).getJsonObject() workflowMetaDataJson.put("id", toWorkflowId) workflowMetaDataJson.put("step", toWorkflowStage) workflowMetaDataJson.put("pageTitle", pageTitle) workflowMetaDataJson.put("label", label) inStream = IOUtils.toInputStream(workflowMetaDataJson.toString(), "UTF-8") try: StorageUtils.createOrUpdatePayload(self.object, "workflow.metadata", inStream) except StorageException: print " ERROR updating dataset payload"
def _readReviewers(self, storedObj, tfpackage):
    """Read the reviewer's recommendation from TFPACKAGE and map it to a
    JSON object with short keys:
    reviewer-recommend-for : for
    reviewer-recommended-storage : storage
    """
    payload = storedObj.getPayload(tfpackage)
    sourceJson = JsonSimple(payload.open()).getJsonObject()
    mapped = JsonObject()
    mapped.put("for", sourceJson.get("reviewer-recommend-for"))
    mapped.put("storage", sourceJson.get("reviewer-recommended-storage"))
    return mapped
def modify_json(self):
    # Decorate the annotation JSON with its PID, a fully-qualified URI
    # (when a root URI is present) and the Anotar schema version.
    simple = JsonSimple(self.json)
    obj = simple.getJsonObject()
    obj.put("id", self.pid)
    rootUri = simple.getString(None, ["annotates", "rootUri"])
    if rootUri is not None:
        baseUrl = "http://%s:%s/" % (self.vc("request").serverName, self.vc("serverPort"))
        obj.put("uri", baseUrl + rootUri + "#" + self.pid)
    obj.put("schemaVersionUri", "http://www.purl.org/anotar/schema/0.1")
    self.json = simple.toString()
def execProcessors(self, procId, configJson, dataMap, stageName):
    # Run every processor configured for this stage: load each class
    # reflectively and invoke its 'process(String x5, HashMap)' method.
    for entry in configJson.getArray(stageName):
        entryJson = JsonSimple(entry)
        className = entryJson.getString("", "class")
        configPath = entryJson.getString("", "config")
        inputKey = entryJson.getString("", "inputKey")
        outputKey = entryJson.getString("", "outputKey")
        processorClass = Class.forName(className)
        processor = processorClass.newInstance()
        processMethod = processorClass.getMethod("process",
                self.get_class("java.lang.String"), self.get_class("java.lang.String"),
                self.get_class("java.lang.String"), self.get_class("java.lang.String"),
                self.get_class("java.lang.String"), self.get_class("java.util.HashMap"))
        processMethod.invoke(processor, procId, inputKey, outputKey,
                             stageName, configPath, dataMap)
def process_tags(self, result):
    # Aggregate Anotar tag documents from a Solr result: plain tags are
    # de-duplicated with a running 'tagCount'; tags carrying locators
    # (e.g. image regions) are kept individually.
    simpleTags = {}   # tag text -> JsonSimple doc with aggregated count
    output = []       # serialised docs that have locators, in order seen
    for row in result:
        # Get Anotar data from Solr data
        annotation = JsonSimple(row.get("jsonString"))
        # Get actual tag text
        text = annotation.getString(None, ["content", "literal"])
        # Find out if they have locators
        locatorCount = annotation.getJsonSimpleList(["annotates", "locators"]).size()
        if locatorCount != 0:
            # Tags with a locator, special case for images etc.
            output.append(annotation.toString())
        elif text in simpleTags:
            # Seen before: bump the stored counter
            seen = simpleTags[text]
            count = seen.getInteger(0, ["tagCount"])
            seen.getJsonObject().put("tagCount", str(count + 1))
        else:
            # First occurrence: start counting at 1
            annotation.getJsonObject().put("tagCount", str(1))
            simpleTags[text] = annotation
    # Push all the 'basic' counts into the list to return
    for text in simpleTags:
        output.append(simpleTags[text].toString())
    return "[" + ",".join(output) + "]"
def modify_json(self):
    # Stamp the annotation payload with its PID, a server-qualified URI
    # and the Anotar schema version, then re-serialise it.
    wrapper = JsonSimple(self.json)
    payload = wrapper.getJsonObject()
    payload.put("id", self.pid)
    rootUri = wrapper.getString(None, ["annotates", "rootUri"])
    if rootUri is not None:
        serverBase = "http://%s:%s/" % (self.vc("request").serverName, self.vc("serverPort"))
        payload.put("uri", serverBase + rootUri + "#" + self.pid)
    payload.put("schemaVersionUri", "http://www.purl.org/anotar/schema/0.1")
    self.json = wrapper.toString()
def __upgrade(self, formData):
    """Migrate legacy form data to the current ReDBox form version.

    Copies every field (renamed via __parseFieldName) into a fresh
    JsonSimple, rebuilds 'metaList', stamps the new version, and applies
    the legacy fix-ups for URL-derived identifiers and related datasets.
    Returns the upgraded JsonSimple.
    """
    # Fields given special treatment rather than a straight copy
    skipFields = ["metaList", "redbox:formVersion", "redbox:newForm"]
    upgraded = JsonSimple()
    upgradedObj = upgraded.getJsonObject()
    metaList = upgraded.writeArray(["metaList"])
    sourceObj = formData.getJsonObject()
    # Copy + rename every ordinary field, tracking names in metaList
    for key in sourceObj.keySet():
        fieldName = str(key)
        if fieldName in skipFields:
            continue
        renamed = self.__parseFieldName(fieldName)
        metaList.add(renamed)
        upgradedObj.put(renamed, sourceObj.get(key))
    # Form management
    upgradedObj.put("redbox:formVersion", self.redboxVersion)
    newForm = sourceObj.get("redbox:newForm")
    if newForm is not None:
        upgradedObj.put("redbox:newForm", newForm)
    # Custom modifications more complicated than most fields:
    # Old URL checkbox 'on' equals new ID Origin 'internal'
    urlOrigin = sourceObj.get("url_useRecordId")
    if urlOrigin is not None and urlOrigin == "on":
        upgradedObj.put("dc:identifier.redbox:origin", "internal")
    # Related data defaults to being unlinked when it came from legacy forms
    template = "dc:relation.vivo:Dataset"
    counter = 1
    while upgradedObj.containsKey("%s.%s.dc:identifier" % (template, counter)):
        upgradedObj.put("%s.%s.redbox:origin" % (template, counter), "external")
        upgradedObj.put("%s.%s.redbox:publish" % (template, counter), "off")
        counter += 1
    self.audit.add(
        "Migration tool. Version upgrade performed '%s' => '%s'"
        % (self.version, self.redboxVersion))
    return upgraded
def __init__(self, file, config, baseline):
    # XML alert handler: set up a SAX reader and load the xpath->field
    # mapping file named by config['xmlMap'] (system properties expanded).
    AlertHandler.__init__(self, file, config, baseline)
    self.saxReader = SAXReader(DocumentFactory())
    self.xmlMapFile = StrSubstitutor.replaceSystemProperties(config['xmlMap'])
    ## Make sure we can see our mappings
    if not os.path.exists(self.xmlMapFile):
        raise AlertException(
            "Requested xmlMap file %s does not exist." % self.xmlMapFile)
    mappingJson = JsonSimple(FileInputStream(File(self.xmlMapFile)))
    self.map = mappingJson.getObject(["mappings"])
    self.exceptions = mappingJson.getObject(["exceptions"])
    self.defaultNamespace = mappingJson.getObject(["defaultNamespace"])
    self.mappedExceptionCount = 0
def process(self):
    """Parse this alert's XML file and map xpath hits into metadata.

    Returns a list containing one JsonSimple object.  Parse errors
    propagate to the caller; file handles are always released.
    """
    reader = None
    inStream = None
    # Run the XML through our parser.  The original wrapped this in an
    # 'except: raise' which re-raised unchanged — a try/finally is the
    # same behavior, stated directly.
    try:
        inStream = FileInputStream(File(self.file))
        reader = InputStreamReader(inStream, "UTF-8")
        document = self.saxReader.read(reader)
    finally:
        # Close our file access objects
        if reader is not None:
            reader.close()
        if inStream is not None:
            inStream.close()
    # Now go looking for all our data.  (The original's 'if data is None'
    # check after this unconditional construction was unreachable and has
    # been removed.)
    data = JsonObject()
    data.put("workflow_source", "XML Alert")  # Default
    self.__mapXpathToFields(document, self.map, data)
    return [JsonSimple(data)]
def getMintLabelByLookup(self, urlName, key, resKey, valKey):
    """Look up display labels in Mint for the metadata value at `key`.

    Queries the Mint lookup service `urlName` with the (colon-escaped)
    metadata value as searchTerms, then collects `valKey` from each
    entry of the `resKey` results array.  Returns an ArrayList of
    labels, or None when the metadata value or result set is missing.
    """
    # BUG FIX: the original wrapped the value in String(...) first and
    # only then tested it for None — that check could never fire, and a
    # missing metadata value would blow up inside the String constructor.
    rawValue = self.metadata.get(key)
    if rawValue is None:
        return None
    value = String(rawValue).replace(":", "\:")
    mapIds = HashMap()
    mapIds.put("searchTerms", value)
    labelsMint = MintLookupHelper.get(self.systemConfig, urlName, mapIds)
    self.log.debug(labelsMint.toString())
    resultsArr = labelsMint.getArray(resKey)
    if resultsArr is None:
        return None
    labels = ArrayList()
    for result in resultsArr:
        labelJson = JsonSimple(result)
        labels.add(labelJson.getString("", valKey))
    return labels
def process(self):
    '''Read the XML file and map xpath items to metadata

    Return a list with 1 JsonSimple object (at most)
    '''
    inStream = None
    reader = None
    document = None
    try:
        # Run the XML through our parser
        inStream = FileInputStream(File(self.file))
        reader = InputStreamReader(inStream, "UTF-8")
        document = self.saxReader.read(reader)
    except:
        # Parse failures propagate unchanged to the caller
        raise
    finally:
        # Always release the file handles
        if reader is not None:
            reader.close()
        if inStream is not None:
            inStream.close()
    # Map the configured xpath expressions onto the metadata object
    data = self.getNewJsonObject()
    self.__mapXpathToFields(document, self.map, data)
    if data is None:
        return None
    return [JsonSimple(data)]
def parseFFmpeg(self, parent):
    """Load and parse the 'ffmpeg.info' payload of the parent's object.

    On success stores the raw text, parsed JSON and the 'outputs' map on
    self and returns True.  Returns False when parsing fails, and None
    when there is no parent or stored object (matching the original's
    implicit fall-through).
    """
    if parent is None:
        return None
    object = parent.getObject()
    if object is None:
        return None
    payload = None
    try:
        payload = object.getPayload("ffmpeg.info")
        # Stream the content out to string
        out = ByteArrayOutputStream()
        IOUtils.copy(payload.open(), out)
        payload.close()
        self.__ffmpegRaw = out.toString("UTF-8")
        out.close()
        # BUG FIX: a second payload.close() followed out.close() here —
        # the payload was already closed above; the duplicate is removed.
        # And parse it
        self.__ffmpegData = JsonSimple(self.__ffmpegRaw)
        if self.__ffmpegData is None:
            return False
        self.__ffmpegOutputs = self.__ffmpegData.getJsonSimpleMap(["outputs"])
        return True
    except:
        # Best-effort cleanup; any failure reports False
        if payload is not None:
            payload.close()
        return False
def __getJsonData(self, pid):
    """Open payload `pid` on the current object and parse it as JSON.

    Returns the parsed JsonSimple.  The payload is now closed in a
    finally block so it is released even if open()/parsing raises
    (the original leaked it on that path).
    """
    object = self.__getObject()
    payload = object.getPayload(pid)
    try:
        data = JsonSimple(payload.open())
    finally:
        payload.close()
    return data
def listHomes(self):
    # Emit the configured institutions as a JSON array, optionally
    # filtered by the 'term' request parameter (case-insensitive match).
    term = self.request.getParameter("term")
    writer = self.response.getPrintWriter("application/json; charset=UTF-8")
    institutions = JsonSimple(self.homeConfigFile).getArray("institutions")
    writer.println("[")
    matched = 0
    for institution in institutions:
        if term is None or institution.get("name").lower().find(term.lower()) > -1:
            matched += 1
            self.printHome(writer, institution, matched)
    writer.println("]")
    writer.close()
def __activate__(self, context):
    # Generic API dispatcher: look up the handler class for 'callType'
    # in system config, instantiate it reflectively, inject services
    # (and config for mint-stats), then hand it the request.
    self.response = context["response"]
    self.request = context["request"]
    self.systemConfig = context["systemConfig"]
    out = self.response.getPrintWriter("text/plain; charset=UTF-8")
    callType = self.request.getParameter("callType")
    handlerConfig = self.systemConfig.getObject("api").get(callType)
    handlerClass = Class.forName(handlerConfig.get("className"))
    handler = handlerClass.newInstance()
    # Inject the scripting services into the handler
    servicesSetter = handlerClass.getMethod(
        "setScriptingServices",
        self.get_class("com.googlecode.fascinator.portal.services.ScriptingServices"))
    servicesSetter.invoke(handler, context['Services'])
    if callType == "mint-stats":
        # This handler also needs its slice of system config
        configSetter = handlerClass.getMethod(
            "setConfig",
            self.get_class("com.googlecode.fascinator.common.JsonSimple"))
        configSetter.invoke(
            handler,
            JsonSimple(self.systemConfig.getObject("api", "mint-stats")))
    requestHandler = handlerClass.getMethod(
        "handleRequest",
        self.get_class("org.apache.tapestry5.services.Request"))
    out.println(requestHandler.invoke(handler, context["request"]))
    out.close()
def __activate__(self, context):
    """Read a JSON request body under an admin session.

    Buffers the request body line-by-line, parses it as JSON, and
    always drops the temporary admin username on exit.
    """
    try:
        self.log = context["log"]
        self.response = context["response"]
        self.request = context["request"]
        self.systemConfig = context["systemConfig"]
        self.storage = context["Services"].getStorage()
        self.indexer = context["Services"].getIndexer()
        self.sessionState = context["sessionState"]
        self.sessionState.set("username", "admin")
        out = self.response.getPrintWriter("text/plain; charset=UTF-8")
        relationshipMapper = ApplicationContextProvider.getApplicationContext(
        ).getBean("relationshipMapper")
        externalCurationMessageBuilder = ApplicationContextProvider.getApplicationContext(
        ).getBean("externalCurationMessageBuilder")
        # Accumulate the whole request body
        builder = StringBuilder()
        # BUG FIX: the original called self.httpRequest.getReader(), but
        # self.httpRequest is never assigned in this script — only
        # self.request (from the context) exists.  TODO confirm the
        # request object exposes getReader() in this container.
        reader = self.request.getReader()
        aux = reader.readLine()
        while aux is not None:
            builder.append(aux)
            aux = reader.readLine()
        requestJsonString = builder.toString()
        requestJson = JsonSimple(requestJsonString)
        # out.println(relationshipMapJsonObject.toString(True))
        out.close()
    finally:
        self.sessionState.remove("username")
class ProxyGetData:
    # Velocity-context script: fetch data from a configured proxy URL and,
    # for autocomplete requests, reformat the JSON into '::'-joined rows.
    # NOTE(review): this class references self.__wget and self.vc, which
    # are not visible in this excerpt — the class presumably continues
    # (and writes 'data' to the response) beyond this view; confirm.
    def __activate__(self, context):
        self.velocityContext = context
        formData = self.vc("formData")
        # build the URL and query parameters to retrieve
        proxyUrls = JsonSimple(self.vc("systemConfig").getObject("proxy-urls"))
        url = ""
        key = formData.get("ns", "")
        # Only namespaces whitelisted in 'proxy-urls' may be proxied
        if proxyUrls.getJsonObject().containsKey(key):
            url = proxyUrls.getString("", [key])
        queryStr = formData.get("qs")
        # Ignore an unexpanded OpenSearch placeholder
        if queryStr == "searchTerms={searchTerms}":
            queryStr = None
        if queryStr:
            if formData.get("jaffa2autocomplete", "false") == "true":
                # Jaffa2 autocomplete: search terms are lower-cased
                url += "?searchTerms=%s" % queryStr.lower()
            else:
                url += "?%s" % queryStr
        self.vc("log").debug("Proxy URL = '{}'", url)
        data = None
        try:
            data = self.__wget(url)
        except Exception, e:
            # Report fetch failures as a JSON error payload
            data = '{"error":"%s"}' % str(e)
            self.vc("log").error("ERROR accessing URL:", e)
        try:
            # parse json to check well-formedness
            json = JsonSimple(data).getJsonObject()
            # format for jquery.autocomplete
            if formData.get("autocomplete", "false") == "true":
                rows = []
                fields = formData.get("fields", "").split(",")
                results = json["results"]
                for result in results:
                    row = ""
                    for field in fields:
                        if row != "":
                            row += "::"
                        value = result.get(field)
                        if value is None:
                            # Fall back to the aggregated metadata block
                            value = result["result-metadata"]["all"].get(field)
                        if isinstance(value, JSONArray):
                            value = ",".join(value)
                        #self.vc("log").debug(" *** value from all: {}", value)
                        if value:
                            row += value
                        else:
                            # Placeholder for missing field values
                            row += "*"
                    rows.append(row)
                if len(rows) > 0:
                    data = "\n".join(rows)
                else:
                    data = ""
        except Exception, e:
            data = '{"error":"%s"}' % str(e)
            self.vc("log").error("ERROR invalid JSON:", e)
def listHomes(self):
    # Stream the configured institutions as a JSON array, keeping only
    # those whose name contains the optional 'term' (case-insensitive).
    filterTerm = self.request.getParameter("term")
    out = self.response.getPrintWriter(
        "application/json; charset=UTF-8")
    config = JsonSimple(self.homeConfigFile)
    out.println("[")
    printed = 0
    for home in config.getArray("institutions"):
        if filterTerm is None:
            printed = printed + 1
            self.printHome(out, home, printed)
        elif home.get("name").lower().find(filterTerm.lower()) > -1:
            printed = printed + 1
            self.printHome(out, home, printed)
    out.println("]")
    out.close()
def __upgrade(self, formData):
    """Upgrade legacy form data to the current ReDBox form version.

    Copies every field (renamed via __parseFieldName) into a new
    JsonSimple, rebuilding 'metaList', stamping the new version number,
    and applying legacy-form fix-ups.  Returns the upgraded JsonSimple
    and records the migration in the audit trail.
    """
    # These fields are handled specially
    ignoredFields = ["metaList", "redbox:formVersion", "redbox:newForm"]
    # Prepare a new JSON setup for upgraded data
    newJsonSimple = JsonSimple()
    newJsonObject = newJsonSimple.getJsonObject()
    metaList = newJsonSimple.writeArray(["metaList"])
    oldJsonObject = formData.getJsonObject()
    # Copy + rename every ordinary field, tracking each new name
    for key in oldJsonObject.keySet():
        oldField = str(key)
        if oldField not in ignoredFields:
            newField = self.__parseFieldName(oldField)
            metaList.add(newField)
            newJsonObject.put(newField, oldJsonObject.get(key))
    # Form management
    newJsonObject.put("redbox:formVersion", self.redboxVersion)
    newForm = oldJsonObject.get("redbox:newForm")
    if newForm is not None:
        newJsonObject.put("redbox:newForm", newForm)
    #########
    # Some final custom modifications more complicated than most fields
    #########
    # Old URL checkbox 'on' equals new ID Origin 'internal'
    urlOrigin = oldJsonObject.get("url_useRecordId")
    if urlOrigin is not None and urlOrigin == "on":
        newJsonObject.put("dc:identifier.redbox:origin", "internal")
    # Related data should default to being unlinked if from legacy forms
    counter = 1
    template = "dc:relation.vivo:Dataset"
    newIdField = "%s.%s.dc:identifier" % (template, counter)
    # Walk every numbered related-dataset entry present in the new data
    while newJsonObject.containsKey(newIdField):
        newOriginField = "%s.%s.redbox:origin" % (template, counter)
        newJsonObject.put(newOriginField, "external")
        newPublishField = "%s.%s.redbox:publish" % (template, counter)
        newJsonObject.put(newPublishField, "off")
        counter += 1
        newIdField = "%s.%s.dc:identifier" % (template, counter)
    self.audit.add("Migration tool. Version upgrade performed '%s' => '%s'" % (self.version, self.redboxVersion))
    return newJsonSimple