def __createFromSelected(self): self.vc("log").debug("Creating package from selected...") packageType, jsonConfigFile = self.__getPackageTypeAndJsonConfigFile() #self.vc("log").debug("packageType = '{}'", packageType) #self.vc("log").debug("jsonConfigFile = '{}'", jsonConfigFile) # if modifying existing manifest, we already have an identifier, # otherwise create a new one manifestId = self.__getActiveManifestId() if manifestId is None: manifestHash = "%s.tfpackage" % uuid.uuid4() else: manifestHash = self.__getActiveManifestPid() # store the manifest file for harvesting packageDir = FascinatorHome.getPathFile("packages") packageDir.mkdirs() manifestFile = File(packageDir, manifestHash) outStream = FileOutputStream(manifestFile) outWriter = OutputStreamWriter(outStream, "UTF-8") manifest = self.__getActiveManifest() oldType = manifest.getType() if oldType is None: manifest.setType(packageType) else: manifest.setType(oldType) #self.vc("log").debug("Manifest: {}", manifest) outWriter.write(manifest.toString(True)) outWriter.close() try: if manifestId is None: # harvest the package as an object username = self.vc("sessionState").get("username") if username is None: username = "******" # necessary? 
harvester = None # set up config files, and make sure they are both deployed workflowsDir = FascinatorHome.getPathFile("harvest/workflows") configFile = self.__getFile(workflowsDir, jsonConfigFile) rulesFile = self.__getFile(workflowsDir, "packaging-rules.py") # run the harvest client with our packaging workflow config harvester = HarvestClient(configFile, manifestFile, username) harvester.start() manifestId = harvester.getUploadOid() harvester.shutdown() else: # update existing object object = StorageUtils.getDigitalObject(Services.getStorage(), manifestId) manifestStream = FileUtils.openInputStream(manifestFile) StorageUtils.createOrUpdatePayload(object, manifestHash, manifestStream) manifestStream.close() object.close() except Exception, ex: error = "Packager workflow failed: %s" % str(ex) self.vc("log").error(error, ex) if harvester is not None: harvester.shutdown() return '{ "status": "failed" }'
def __createNew(self): self.vc("log").debug("Creating a new package...") packageType, jsonConfigFile = self.__getPackageTypeAndJsonConfigFile() self.vc("log").debug("packageType = '{}'", packageType) self.vc("log").debug("jsonConfigFile = '{}'", jsonConfigFile) manifestHash = "%s.tfpackage" % uuid.uuid4() # store the manifest file for harvesting packageDir = FascinatorHome.getPathFile("packages") packageDir.mkdirs() manifestFile = File(packageDir, manifestHash) outStream = FileOutputStream(manifestFile) outWriter = OutputStreamWriter(outStream, "UTF-8") self.vc("sessionState").set("package/active", None) manifest = self.__getActiveManifest() manifest.setType(packageType) metaList = list(self.vc("formData").getValues("metaList")) jsonObj = manifest.getJsonObject() for metaName in metaList: value = self.vc("formData").get(metaName) jsonObj.put(metaName, value) if self.vc("formData").getValues("sequencesMetaList") != None: sequenceService = ApplicationContextProvider.getApplicationContext().getBean("sequenceService") sequencesMetaList = list(self.vc("formData").getValues("sequencesMetaList")) for sequenceInfo in sequencesMetaList: sequenceInfoJson = JsonSimple(sequenceInfo) sequenceIndex = sequenceService.getSequence(sequenceInfoJson.getString(None,"sequenceName")) jsonObj.put(sequenceInfoJson.getString(None,"metadataName"), String.format(sequenceInfoJson.getString(None,"stringFormat"),sequenceIndex)) self.vc("log").debug("json object created is: %r" % jsonObj) outWriter.write(manifest.toString(True)) outWriter.close() # adding ability to set access plugin accessPlugin = self.vc("formData").get("access_plugin", "derby") if accessPlugin is not None: self.vc("page").authentication.set_access_plugin(accessPlugin) try: # harvest the package as an object username = self.vc("sessionState").get("username") if username is None: username = "******" # necessary? 
harvester = None # set up config files, creating if necessary workflowsDir = FascinatorHome.getPathFile("harvest/workflows") configFile = self.__getFile(workflowsDir, jsonConfigFile) self.__getFile(workflowsDir, "packaging-rules.py") # run the harvest client with our packaging workflow config harvester = HarvestClient(configFile, manifestFile, username) harvester.start() manifestId = harvester.getUploadOid() harvester.shutdown() except Exception, ex: error = "Packager workflow failed: %s" % str(ex) self.vc("log").error(error, ex) if harvester is not None: harvester.shutdown() return '{ "status": "failed" }'
def updateObjectMetadata(self, objectMetaData, toWorkflowId):
    """Stamp harvest-config and indexer-rules identifiers onto object metadata
    when an object transitions to the given workflow.

    Registers the workflow's JSON config file and its rules script in
    storage (if not already present), records their PIDs/OIDs on the
    object, and marks the transition with a timestamp.
    """
    def ensureHarvestObject(harvestFile):
        # Look the file up in storage, registering it first if absent.
        digitalObject = StorageUtils.checkHarvestFile(self.storage, harvestFile)
        if digitalObject is None:
            newOid = StorageUtils.generateOid(harvestFile)
            digitalObject = StorageUtils.getDigitalObject(self.storage, newOid)
        return digitalObject

    packageType, jsonConfigFile = self.__getPackageTypeAndJsonConfigFile(toWorkflowId)
    workflowsDir = FascinatorHome.getPathFile("harvest/workflows")

    # Harvest configuration for the target workflow
    configFile = File(workflowsDir, jsonConfigFile)
    configObject = ensureHarvestObject(configFile)
    objectMetaData.setProperty("jsonConfigPid", jsonConfigFile)
    objectMetaData.setProperty("jsonConfigOid", configObject.getId())

    # The rules script name is declared inside the harvest config itself
    configJson = JsonSimple(configFile)
    rulesFileName = configJson.getString(None, "indexer", "script", "rules")
    rulesFile = File(workflowsDir, rulesFileName)
    rulesObject = ensureHarvestObject(rulesFile)
    objectMetaData.setProperty("rulesPid", rulesFileName)
    objectMetaData.setProperty("rulesOid", rulesObject.getId())

    # Flag and timestamp the transition
    objectMetaData.setProperty("workflowTransitioned", "true")
    transitionTime = time.strftime("%Y-%m-%dT%H:%M:%SZ", time.localtime())
    objectMetaData.setProperty("date_transitioned", transitionTime)
def __ingestJson(self, fileName, jsonObject, move): if self.configFile is None: self.configFile = FascinatorHome.getPathFile( "harvest/workflows/dataset.json") harvester = None try: ## Cache the file out to disk... although requires it ## .tfpackage extension due to jsonVelocity transformer jsonPath = self.pTemp(fileName) jsonFile = open(jsonPath, "wb") jsonFile.write(jsonObject.toString(True)) jsonFile.close() ## Now instantiate a HarvestClient just for this File. harvester = HarvestClient(self.configFile, File(jsonPath), "guest") harvester.start() ## And cleanup afterwards oid = harvester.getUploadOid() self.log.info("Harvested alert '{}' to '{}'", fileName, oid) if move: shutil.move(self.pBase(fileName), self.pDone(fileName)) return True except Exception, e: ## TODO: This block looks to just be a copy of the ## top-level one, yet it runs per ROW, not for the ## whole File. Just the JSON data should be stored ## Move the CSV to the 'failed' directory shutil.move(self.pBase(fileName), self.pFail(fileName)) ## And write our error data to disk beside it self.writeError(fileName, e) return False
def updateObjectMetadata(self, objectMetaData, toWorkflowId):
    """Record harvest config and rules identifiers for a workflow transition.

    Registers the target workflow's JSON config and its indexer rules
    script in storage if absent, stores their PIDs/OIDs on the object's
    metadata, and timestamps the transition.
    """
    # packageType is unpacked but unused here; only jsonConfigFile is needed
    packageType, jsonConfigFile = self.__getPackageTypeAndJsonConfigFile(toWorkflowId)
    workflowsDir = FascinatorHome.getPathFile("harvest/workflows")
    configFile = File(workflowsDir, jsonConfigFile)
    # Register the config file in storage if it is not already there
    configObject = StorageUtils.checkHarvestFile(self.storage, configFile);
    if configObject is None:
        oid = StorageUtils.generateOid(configFile);
        configObject = StorageUtils.getDigitalObject(self.storage, oid);
    objectMetaData.setProperty("jsonConfigPid", jsonConfigFile)
    objectMetaData.setProperty("jsonConfigOid", configObject.getId())
    # The rules script file name is declared inside the harvest config
    configJson = JsonSimple(configFile)
    rulesFileName = configJson.getString(None, "indexer","script","rules")
    rulesFile = File(workflowsDir,rulesFileName)
    # Same register-if-absent dance for the rules script
    rulesObject = StorageUtils.checkHarvestFile(self.storage, rulesFile);
    if rulesObject is None:
        oid = StorageUtils.generateOid(rulesFile);
        rulesObject = StorageUtils.getDigitalObject(self.storage, oid);
    objectMetaData.setProperty("rulesPid", rulesFileName)
    objectMetaData.setProperty("rulesOid", rulesObject.getId())
    # Flag the transition and stamp it.
    # NOTE(review): strftime uses localtime() but appends a literal "Z"
    # (UTC designator) -- presumably the server runs in UTC; confirm.
    objectMetaData.setProperty("workflowTransitioned", "true")
    objectMetaData.setProperty("date_transitioned", time.strftime("%Y-%m-%dT%H:%M:%SZ", time.localtime()))
def __ingestJson(self, fileName, jsonObject, move):
    """Harvest one alert's JSON payload as a new object.

    Caches the JSON to a temp file, harvests it via HarvestClient as the
    "guest" user, and on success optionally moves the source file to the
    'done' directory.  Returns True on success; on failure moves the
    source to 'failed', writes an error report beside it, returns False.
    """
    # Lazily resolve the default harvest config on first use
    if self.configFile is None:
        self.configFile = FascinatorHome.getPathFile("harvest/workflows/dataset.json")
    harvester = None
    # NOTE(review): harvester.start() is called below but shutdown() is
    # never invoked on any path -- looks like a resource leak; other
    # harvest call sites shut the client down after getUploadOid().
    try:
        ## Cache the file out to disk... although requires it
        ## .tfpackage extension due to jsonVelocity transformer
        jsonPath = self.pTemp(fileName)
        jsonFile = open(jsonPath, "wb")
        jsonFile.write(jsonObject.toString(True))
        jsonFile.close()

        ## Now instantiate a HarvestClient just for this File.
        harvester = HarvestClient(self.configFile, File(jsonPath), "guest")
        harvester.start()

        ## And cleanup afterwards
        oid = harvester.getUploadOid()
        self.log.info("Harvested alert '{}' to '{}'", fileName, oid)
        if move:
            shutil.move(self.pBase(fileName), self.pDone(fileName))
        return True
    except Exception, e:
        ## TODO: This block looks to just be a copy of the
        ## top-level one, yet it runs per ROW, not for the
        ## whole File. Just the JSON data should be stored

        ## Move the CSV to the 'failed' directory
        shutil.move(self.pBase(fileName), self.pFail(fileName))
        ## And write our error data to disk beside it
        self.writeError(fileName, e)
        return False
def __createNew(self): self.vc("log").debug("Creating a new package...") packageType, jsonConfigFile = self.__getPackageTypeAndJsonConfigFile() #self.vc("log").debug("packageType = '{}'", packageType) #self.vc("log").debug("jsonConfigFile = '{}'", jsonConfigFile) manifestHash = "%s.tfpackage" % uuid.uuid4() # store the manifest file for harvesting packageDir = FascinatorHome.getPathFile("packages") packageDir.mkdirs() manifestFile = File(packageDir, manifestHash) outStream = FileOutputStream(manifestFile) outWriter = OutputStreamWriter(outStream, "UTF-8") self.vc("sessionState").set("package/active", None) manifest = self.__getActiveManifest() manifest.setType(packageType) metaList = list(self.vc("formData").getValues("metaList")) jsonObj = manifest.getJsonObject() for metaName in metaList: value = self.vc("formData").get(metaName) jsonObj.put(metaName, value) outWriter.write(manifest.toString(True)) outWriter.close() try: # harvest the package as an object username = self.vc("sessionState").get("username") if username is None: username = "******" # necessary? harvester = None # set up config files, creating if necessary workflowsDir = FascinatorHome.getPathFile("harvest/workflows") configFile = self.__getFile(workflowsDir, jsonConfigFile) self.__getFile(workflowsDir, "packaging-rules.py") # run the harvest client with our packaging workflow config harvester = HarvestClient(configFile, manifestFile, username) harvester.start() manifestId = harvester.getUploadOid() harvester.shutdown() except Exception, ex: error = "Packager workflow failed: %s" % str(ex) log.error(error, ex) if harvester is not None: harvester.shutdown() return '{ "status": "failed" }'
def __prepareAlertFromOldConfig(self, alertsPath):
    """Build a single alert-configuration dict from the legacy alert config.

    Maps the old single-directory alert setup onto the newer structure:
    one "Default alert" watching *alertsPath*, dispatching CSV files to
    CSVAlertHandler and XML/RIF files to XMLAlertHandler.
    """
    return {
        "name": "Default alert",
        "path": alertsPath,
        # Alerts are harvested with the standard dataset workflow
        "harvestConfig": FascinatorHome.getPathFile("harvest/workflows/dataset.json"),
        "handlers": {
            "csv": "CSVAlertHandler",
            "xml": "XMLAlertHandler",
            "rif": "XMLAlertHandler"
        },
        "CSVAlertHandler-params": {
            "configMap": {
                "csv": {
                    "Dialect": {
                        # BUGFIX: lowercase "true" is not defined in
                        # Python (NameError) -- the constant is "True"
                        "skipinitialspace": True,
                        "quotechar": "\"",
                        "delimiter": ","
                    },
                    "hasHeaderRow": True,
                    # CSV column index -> manifest field(s)
                    "FieldMap": {
                        0: ["title", "redbox:submissionProcess.dc:title"],
                        1: [
                            "description",
                            "redbox:submissionProcess.dc:description"
                        ],
                        2: "redbox:submissionProcess.locrel:prc.foaf:Person.foaf:name",
                        3: "redbox:submissionProcess.locrel:prc.foaf:Person.foaf:phone",
                        4: "redbox:submissionProcess.locrel:prc.foaf:Person.foaf:mbox",
                        5: "workflow_source",
                        6: "redbox:submissionProcess.skos:note"
                    }
                }
            }
        },
        "XMLAlertHandler-params": {
            "configMap": {
                # XML field maps are taken straight from system config
                "xml": {
                    "xmlMap": self.config.getObject(None, ["alerts", "xmlMaps", "xml"])
                },
                "rif": {
                    "xmlMap": self.config.getObject(None, ["alerts", "xmlMaps", "rif"])
                },
            }
        }
    }
def __activate__(self, context):
    """Velocity entry point: reset cached search state and run the queries.

    Clears any leftover session filter query, resets the cached result
    sets, loads the workflow stages from the dataset workflow config, and
    kicks off the dashboard search.
    """
    self.velocityContext = context
    # Drop any filter query left by a previous search session
    self.vc("sessionState").remove("fq")
    # Reset cached result sets before re-querying
    self.__myPlans = self.__sharedPlans = None
    self.__myDrafts = self.__myDatasets = None
    # Stage definitions come straight from the dataset workflow config
    workflowConfig = FascinatorHome.getPathFile("harvest/workflows/dataset.json")
    self.__stages = JsonSimple(workflowConfig).getArray("stages")
    self.__search()
def __activate__(self, context):
    """Velocity entry point: reset cached search state and run the queries.

    Clears any leftover session filter query, resets the cached result
    sets, loads the workflow stages from the dataset workflow config, and
    kicks off the dashboard search.
    """
    self.velocityContext = context
    # Drop any filter query left by a previous search session
    self.vc("sessionState").remove("fq")
    # Reset cached result sets before re-querying
    self.__myPlans = None
    self.__sharedPlans = None
    self.__myDrafts = None
    self.__myDatasets = None
    # Stage definitions come straight from the dataset workflow config
    self.__stages = JsonSimple(
        FascinatorHome.getPathFile(
            "harvest/workflows/dataset.json")).getArray("stages")
    self.__search()
def __prepareAlertFromOldConfig(self, alertsPath):
    """Build a single alert-configuration dict from the legacy alert config.

    Maps the old single-directory alert setup onto the newer structure:
    one "Default alert" watching *alertsPath*, dispatching CSV files to
    CSVAlertHandler and XML/RIF files to XMLAlertHandler.
    """
    return {
        "name": "Default alert",
        "path": alertsPath,
        # Alerts are harvested with the standard dataset workflow
        "harvestConfig": FascinatorHome.getPathFile("harvest/workflows/dataset.json"),
        "handlers": {
            "csv": "CSVAlertHandler",
            "xml": "XMLAlertHandler",
            "rif": "XMLAlertHandler"
        },
        "CSVAlertHandler-params": {
            "configMap": {
                "csv": {
                    "Dialect": {
                        # BUGFIX: lowercase "true" is not defined in
                        # Python (NameError) -- the constant is "True"
                        "skipinitialspace": True,
                        "quotechar": "\"",
                        "delimiter": ","
                    },
                    "hasHeaderRow": True,
                    # CSV column index -> manifest field(s)
                    "FieldMap": {
                        0: ["title","redbox:submissionProcess.dc:title"],
                        1: ["description", "redbox:submissionProcess.dc:description"],
                        2: "redbox:submissionProcess.locrel:prc.foaf:Person.foaf:name",
                        3: "redbox:submissionProcess.locrel:prc.foaf:Person.foaf:phone",
                        4: "redbox:submissionProcess.locrel:prc.foaf:Person.foaf:mbox",
                        5: "workflow_source",
                        6: "redbox:submissionProcess.skos:note"
                    }
                }
            }
        },
        "XMLAlertHandler-params": {
            "configMap": {
                # XML field maps are taken straight from system config
                "xml": {"xmlMap": self.config.getObject(None, ["alerts", "xmlMaps", "xml"])},
                "rif": {"xmlMap": self.config.getObject(None, ["alerts", "xmlMaps", "rif"])},
            }
        }
    }
def getDisplayList(self):
    """Return the ARMS package preview-field configuration as a JsonSimple."""
    previewConfig = os.path.join("system-files", "package-arms", "preview-fields.json")
    return JsonSimple(FascinatorHome.getPathFile(previewConfig))
def __createNew(self): self.vc("log").debug("Creating a new package...") packageType, jsonConfigFile = self.__getPackageTypeAndJsonConfigFile() self.vc("log").debug("packageType = '{}'", packageType) self.vc("log").debug("jsonConfigFile = '{}'", jsonConfigFile) manifestHash = "%s.tfpackage" % uuid.uuid4() # store the manifest file for harvesting packageDir = FascinatorHome.getPathFile("packages") packageDir.mkdirs() manifestFile = File(packageDir, manifestHash) outStream = FileOutputStream(manifestFile) outWriter = OutputStreamWriter(outStream, "UTF-8") self.vc("sessionState").set("package/active", None) manifest = self.__getActiveManifest() manifest.setType(packageType) metaList = list(self.vc("formData").getValues("metaList")) jsonObj = manifest.getJsonObject() for metaName in metaList: value = self.vc("formData").get(metaName) jsonObj.put(metaName, value) if self.vc("formData").getValues("sequencesMetaList") != None: sequenceService = ApplicationContextProvider.getApplicationContext( ).getBean("sequenceService") sequencesMetaList = list( self.vc("formData").getValues("sequencesMetaList")) for sequenceInfo in sequencesMetaList: sequenceInfoJson = JsonSimple(sequenceInfo) sequenceIndex = sequenceService.getSequence( sequenceInfoJson.getString(None, "sequenceName")) jsonObj.put( sequenceInfoJson.getString(None, "metadataName"), String.format( sequenceInfoJson.getString(None, "stringFormat"), sequenceIndex)) self.vc("log").debug("json object created is: %r" % jsonObj) outWriter.write(manifest.toString(True)) outWriter.close() # adding ability to set access plugin accessPlugin = self.vc("formData").get("access_plugin", "derby") if accessPlugin is not None: self.vc("page").authentication.set_access_plugin(accessPlugin) try: # harvest the package as an object username = self.vc("sessionState").get("username") if username is None: username = "******" # necessary? 
harvester = None # set up config files, creating if necessary workflowsDir = FascinatorHome.getPathFile("harvest/workflows") configFile = self.__getFile(workflowsDir, jsonConfigFile) self.__getFile(workflowsDir, "packaging-rules.py") # run the harvest client with our packaging workflow config harvester = HarvestClient(configFile, manifestFile, username) harvester.start() manifestId = harvester.getUploadOid() harvester.shutdown() except Exception, ex: error = "Packager workflow failed: %s" % str(ex) self.vc("log").error(error, ex) if harvester is not None: harvester.shutdown() return '{ "status": "failed" }'
def __search(self):
    """Populate the workflow dashboard state from Solr.

    Runs four queries against the indexer and stores the results on self:
    workflow_step facet counts (__steps), WorkflowStage objects built from
    the dataset workflow config (__stages), the 25 most recently modified
    records (__result), records sitting in the first workflow stage
    (__alerts), and live records modified in the last month (__latest).
    Finally seeds the session "fq" for follow-up searches.
    """
    indexer = Services.getIndexer()
    portalQuery = Services.getPortalManager().get(
        self.vc("portalId")).getQuery()
    portalSearchQuery = Services.getPortalManager().get(
        self.vc("portalId")).getSearchQuery()

    # Security prep work: non-admins may only see records matching one of
    # their roles, records they are explicitly excepted on, or records
    # they own.
    current_user = self.vc("page").authentication.get_username()
    security_roles = self.vc("page").authentication.get_roles_list()
    security_filter = 'security_filter:("' + '" OR "'.join(
        security_roles) + '")'
    security_exceptions = 'security_exception:"' + current_user + '"'
    owner_query = 'owner:"' + current_user + '"'
    security_query = "(" + security_filter + ") OR (" + security_exceptions + ") OR (" + owner_query + ")"
    isAdmin = self.vc("page").authentication.is_admin()

    # Query 1: facet counts per workflow_step (rows=0, facets only)
    req = SearchRequest("*:*")
    req.setParam("fq", 'item_type:"object"')
    if portalQuery:
        req.addParam("fq", portalQuery)
    if portalSearchQuery:
        req.addParam("fq", portalSearchQuery)
    req.addParam("fq", "")
    req.setParam("rows", "0")
    req.setParam("facet", "true")
    req.setParam("facet.field", "workflow_step")
    if not isAdmin:
        req.addParam("fq", security_query)
    out = ByteArrayOutputStream()
    indexer.search(req, out)
    steps = SolrResult(ByteArrayInputStream(out.toByteArray()))
    self.__steps = steps.getFacets().get("workflow_step")

    # Pair each configured workflow stage with its facet counts
    wfConfig = JsonSimple(
        FascinatorHome.getPathFile("harvest/workflows/dataset.json"))
    jsonStageList = wfConfig.getJsonSimpleList(["stages"])
    stages = []
    for jsonStage in jsonStageList:
        wfStage = WorkflowStage(jsonStage, self.__steps)
        stages.append(wfStage)
    self.__stages = stages

    # Query 2: main result list -- 25 most recently modified records
    req = SearchRequest("*:*")
    req.setParam("fq", 'item_type:"object"')
    if portalQuery:
        req.addParam("fq", portalQuery)
    if portalSearchQuery:
        req.addParam("fq", portalSearchQuery)
    req.addParam("fq", "")
    req.setParam("rows", "25")
    req.setParam("sort", "last_modified desc, f_dc_title asc")
    if not isAdmin:
        req.addParam("fq", security_query)
    out = ByteArrayOutputStream()
    indexer.search(req, out)
    self.__result = SolrResult(ByteArrayInputStream(out.toByteArray()))

    # Query 3: same request narrowed to the first configured stage.
    # NOTE(review): assumes the workflow config defines at least one
    # stage -- stages[0] raises IndexError otherwise; confirm config.
    req.addParam("fq",
                 "workflow_step:%s" % stages[0].getName())
    out = ByteArrayOutputStream()
    indexer.search(req, out)
    self.__alerts = SolrResult(ByteArrayInputStream(out.toByteArray()))

    # Query 4: live records modified in the last month
    req = SearchRequest(
        "last_modified:[NOW-1MONTH TO *] AND workflow_step:live")
    req.setParam("fq", 'item_type:"object"')
    if portalQuery:
        req.addParam("fq", portalQuery)
    if portalSearchQuery:
        req.addParam("fq", portalSearchQuery)
    req.setParam("rows", "10")
    req.setParam("sort", "last_modified desc, f_dc_title asc")
    if not isAdmin:
        req.addParam("fq", security_query)
    out = ByteArrayOutputStream()
    indexer.search(req, out)
    self.__latest = SolrResult(ByteArrayInputStream(out.toByteArray()))

    # Seed the session filter query for follow-up searches
    self.vc("sessionState").set("fq", 'item_type:"object"')
def __search(self):
    """Populate the workflow dashboard state from Solr.

    Same flow as the other dashboard variant: workflow_step facet counts
    (__steps), WorkflowStage objects from the dataset workflow config
    (__stages), the 25 most recently modified records (__result), records
    in the first stage (__alerts), and live records modified in the last
    month (__latest); additionally runs the embargo search before seeding
    the session "fq".
    """
    indexer = Services.getIndexer()
    portalQuery = Services.getPortalManager().get(self.vc("portalId")).getQuery()
    portalSearchQuery = Services.getPortalManager().get(self.vc("portalId")).getSearchQuery()

    # Security prep work: non-admins may only see records matching one of
    # their roles, records they are explicitly excepted on, or records
    # they own.
    current_user = self.vc("page").authentication.get_username()
    security_roles = self.vc("page").authentication.get_roles_list()
    security_filter = 'security_filter:("' + '" OR "'.join(security_roles) + '")'
    security_exceptions = 'security_exception:"' + current_user + '"'
    owner_query = 'owner:"' + current_user + '"'
    security_query = "(" + security_filter + ") OR (" + security_exceptions + ") OR (" + owner_query + ")"
    isAdmin = self.vc("page").authentication.is_admin()

    # Query 1: facet counts per workflow_step (rows=0, facets only)
    req = SearchRequest("*:*")
    req.setParam("fq", 'item_type:"object"')
    if portalQuery:
        req.addParam("fq", portalQuery)
    if portalSearchQuery:
        req.addParam("fq", portalSearchQuery)
    req.addParam("fq", "")
    req.setParam("rows", "0")
    req.setParam("facet", "true")
    req.setParam("facet.field", "workflow_step")
    if not isAdmin:
        req.addParam("fq", security_query)
    out = ByteArrayOutputStream()
    indexer.search(req, out)
    steps = SolrResult(ByteArrayInputStream(out.toByteArray()))
    self.__steps = steps.getFacets().get("workflow_step")

    # Pair each configured workflow stage with its facet counts
    wfConfig = JsonSimple(FascinatorHome.getPathFile("harvest/workflows/dataset.json"))
    jsonStageList = wfConfig.getJsonSimpleList(["stages"])
    stages = []
    for jsonStage in jsonStageList:
        wfStage = WorkflowStage(jsonStage, self.__steps)
        stages.append(wfStage)
    self.__stages = stages

    # Query 2: main result list -- 25 most recently modified records
    req = SearchRequest("*:*")
    req.setParam("fq", 'item_type:"object"')
    if portalQuery:
        req.addParam("fq", portalQuery)
    if portalSearchQuery:
        req.addParam("fq", portalSearchQuery)
    req.addParam("fq", "")
    req.setParam("rows", "25")
    req.setParam("sort", "last_modified desc, f_dc_title asc");
    if not isAdmin:
        req.addParam("fq", security_query)
    out = ByteArrayOutputStream()
    indexer.search(req, out)
    self.__result = SolrResult(ByteArrayInputStream(out.toByteArray()))

    # Query 3: same request narrowed to the first configured stage.
    # NOTE(review): assumes the workflow config defines at least one
    # stage -- stages[0] raises IndexError otherwise; confirm config.
    req.addParam("fq",
                 "workflow_step:%s" % stages[0].getName())
    out = ByteArrayOutputStream()
    indexer.search(req, out)
    self.__alerts = SolrResult(ByteArrayInputStream(out.toByteArray()))

    # Query 4: live records modified in the last month
    req = SearchRequest("last_modified:[NOW-1MONTH TO *] AND workflow_step:live")
    req.setParam("fq", 'item_type:"object"')
    if portalQuery:
        req.addParam("fq", portalQuery)
    if portalSearchQuery:
        req.addParam("fq", portalSearchQuery)
    req.setParam("rows", "10")
    req.setParam("sort", "last_modified desc, f_dc_title asc");
    if not isAdmin:
        req.addParam("fq", security_query)
    out = ByteArrayOutputStream()
    indexer.search(req, out)
    self.__latest = SolrResult(ByteArrayInputStream(out.toByteArray()))

    # Embargo results, then seed the session filter query
    self._searchEmbargoes()
    self.vc("sessionState").set("fq", 'item_type:"object"')
def __search(self):
    """Populate the home-page dashboard result sets from Solr.

    Runs four queries: records modified in the last month (__latest),
    records owned by the current user (__mine), records where the user
    appears in workflow_security (__workflows), and an unrestricted count
    query whose output becomes __result.  Also builds the services-UI
    workflow stages (__selfservicesStages) and seeds the session "fq".
    """
    indexer = self.services.getIndexer()
    portalQuery = self.services.getPortalManager().get(self.vc("portalId")).getQuery()
    portalSearchQuery = self.services.getPortalManager().get(self.vc("portalId")).getSearchQuery()

    # Security prep work: non-admins may only see records matching one of
    # their roles, records they are explicitly excepted on, or records
    # they own.
    current_user = self.vc("page").authentication.get_username()
    security_roles = self.vc("page").authentication.get_roles_list()
    security_filter = 'security_filter:("' + '" OR "'.join(security_roles) + '")'
    security_exceptions = 'security_exception:"' + current_user + '"'
    owner_query = 'owner:"' + current_user + '"'
    security_query = "(" + security_filter + ") OR (" + security_exceptions + ") OR (" + owner_query + ")"
    isAdmin = self.vc("page").authentication.is_admin()

    # Query 1: records modified in the last month
    req = SearchRequest("last_modified:[NOW-1MONTH TO *]")
    req.setParam("fq", 'item_type:"object"')
    if portalQuery:
        req.addParam("fq", portalQuery)
    if portalSearchQuery:
        req.addParam("fq", portalSearchQuery)
    req.setParam("rows", "10")
    req.setParam("sort", "last_modified desc, f_dc_title asc");
    if not isAdmin:
        req.addParam("fq", security_query)
    out = ByteArrayOutputStream()
    indexer.search(req, out)
    self.__latest = SolrResult(ByteArrayInputStream(out.toByteArray()))

    # Query 2: records owned by the current user
    req = SearchRequest(owner_query)
    req.setParam("fq", 'item_type:"object"')
    if portalQuery:
        req.addParam("fq", portalQuery)
    if portalSearchQuery:
        req.addParam("fq", portalSearchQuery)
    req.setParam("rows", "10")
    req.setParam("sort", "last_modified desc, f_dc_title asc");
    if not isAdmin:
        req.addParam("fq", security_query)
    out = ByteArrayOutputStream()
    indexer.search(req, out)
    self.__mine = SolrResult(ByteArrayInputStream(out.toByteArray()))

    # Query 3: records where the user is named in workflow_security
    req = SearchRequest('workflow_security:"' + current_user + '"')
    req.setParam("fq", 'item_type:"object"')
    if portalQuery:
        req.addParam("fq", portalQuery)
    if portalSearchQuery:
        req.addParam("fq", portalSearchQuery)
    req.setParam("rows", "10")
    req.setParam("sort", "last_modified desc, f_dc_title asc");
    if not isAdmin:
        req.addParam("fq", security_query)
    out = ByteArrayOutputStream()
    indexer.search(req, out)
    self.__workflows = SolrResult(ByteArrayInputStream(out.toByteArray()))

    # Query 4: everything visible to the user (rows=0 -- counts only)
    req = SearchRequest("*:*")
    req.setParam("fq", 'item_type:"object"')
    if portalQuery:
        req.addParam("fq", portalQuery)
    if portalSearchQuery:
        req.addParam("fq", portalSearchQuery)
    req.addParam("fq", "")
    req.setParam("rows", "0")
    if not isAdmin:
        req.addParam("fq", security_query)
    out = ByteArrayOutputStream()
    indexer.search(req, out)
    # Seed the session filter query for follow-up searches
    self.vc("sessionState").set("fq", 'item_type:"object"')
    #sessionState.set("query", portalQuery.replace("\"", "'"))

    # Load in the services UI workflow.
    # NOTE(review): self.__steps is read here but never assigned in this
    # method -- presumably set elsewhere on this object before __search
    # runs; confirm, otherwise this raises AttributeError.
    selfSubmitWfConfig = JsonSimple(FascinatorHome.getPathFile("harvest/workflows/servicesUI.json"))
    selfSubmitJsonStageList = selfSubmitWfConfig.getJsonSimpleList(["stages"])
    servicesStages = []
    for jsonStage in selfSubmitJsonStageList:
        wfStage = WorkflowStage(jsonStage, self.__steps)
        servicesStages.append(wfStage)
    self.__selfservicesStages = servicesStages
    self.__result = SolrResult(ByteArrayInputStream(out.toByteArray()))
def getDisplayList(self):
    """Return the ARMS package preview-field configuration as a JsonSimple."""
    configPath = FascinatorHome.getPathFile(
        os.path.join("system-files", "package-arms", "preview-fields.json"))
    return JsonSimple(configPath)
def __createFromSelected(self): self.vc("log").debug("Creating package from selected...") packageType, jsonConfigFile = self.__getPackageTypeAndJsonConfigFile() #self.vc("log").debug("packageType = '{}'", packageType) #self.vc("log").debug("jsonConfigFile = '{}'", jsonConfigFile) # if modifying existing manifest, we already have an identifier, # otherwise create a new one manifestId = self.__getActiveManifestId() if manifestId is None: manifestHash = "%s.tfpackage" % uuid.uuid4() else: manifestHash = self.__getActiveManifestPid() # store the manifest file for harvesting packageDir = FascinatorHome.getPathFile("packages") packageDir.mkdirs() manifestFile = File(packageDir, manifestHash) outStream = FileOutputStream(manifestFile) outWriter = OutputStreamWriter(outStream, "UTF-8") manifest = self.__getActiveManifest() oldType = manifest.getType() if oldType is None: manifest.setType(packageType) else: manifest.setType(oldType) self.vc("log").debug("Manifest: %s" % manifest) outWriter.write(manifest.toString(True)) outWriter.close() try: if manifestId is None: # harvest the package as an object username = self.vc("sessionState").get("username") if username is None: username = "******" # necessary? 
harvester = None # set up config files, and make sure they are both deployed workflowsDir = FascinatorHome.getPathFile("harvest/workflows") configFile = self.__getFile(workflowsDir, jsonConfigFile) rulesFile = self.__getFile(workflowsDir, "packaging-rules.py") # run the harvest client with our packaging workflow config harvester = HarvestClient(configFile, manifestFile, username) harvester.start() manifestId = harvester.getUploadOid() harvester.shutdown() else: # update existing object object = StorageUtils.getDigitalObject(Services.getStorage(), manifestId) manifestStream = FileUtils.openInputStream(manifestFile) StorageUtils.createOrUpdatePayload(object, manifestHash, manifestStream) manifestStream.close() object.close() except Exception, ex: error = "Packager workflow failed: %s" % str(ex) self.vc("log").error(error, ex) if harvester is not None: harvester.shutdown() return '{ "status": "failed" }'