def write(self, name, file):
    """Add an entry to the cab archive under *name*.

    *file* may be a ByteArrayOutputStream, a filename string, or an
    already-open input stream.
    """
    fname = self.getName(name)
    entry = cab.CabFileEntry(name=fname, date=Date())
    # Normalise the source into an input stream before adding it.
    if isinstance(file, ByteArrayOutputStream):
        stream = ByteArrayInputStream(file.toByteArray())
    elif isinstance(file, type('')):
        stream = FileInputStream(file)
    else:
        stream = file
    self.cabfile.addStream(stream, entry)
def testEscapeBackslash(self):
    """An escaped backslash in JSON must decode to one literal backslash."""
    # JSON "xy\\.z" should become String "xy\.z"
    text = "{ \"abc\": \"xy\\\\.z\" }"
    stream = ByteArrayInputStream(text.encode('utf-8'))
    json_translator = JsonStreamTranslator("String", stream)
    result = json_translator.parse()
    abc = result['abc']
    # FIX: assertEqual (assertEquals is a deprecated alias) and the
    # failure message now names the backslash it actually checks.
    self.assertEqual("xy\\.z", abc, "Should be a single backslash")
def _read_image(self, fp):
    """Decode an image from the file-like object *fp*.

    Under Jython decoding goes through javax.imageio; otherwise PIL is
    used when available. Returns None when neither backend applies.
    """
    running_on_jvm = sys.platform[0:4] == 'java'
    if running_on_jvm:
        from javax.imageio import ImageIO
        from java.io import ByteArrayInputStream
        return ImageIO.read(ByteArrayInputStream(fp.read()))
    if PILImage:
        return PILImage.open(fp)
def __activate__(self, context):
    """Entry point: mark a relationship on a stored object as curated.

    Resolves the target object (from the 'oid' request parameter, or by
    looking up an 'identifier'), updates or creates the matching entry in
    the metadata.json 'relationships' array, and writes the payload back.
    Runs as the 'admin' session user for the duration of the request.
    """
    try:
        self.log = context["log"]
        self.response = context["response"]
        self.request = context["request"]
        self.systemConfig = context["systemConfig"]
        self.storage = context["Services"].getStorage()
        self.indexer = context["Services"].getIndexer()
        self.sessionState = context["sessionState"]
        # Temporarily escalate to admin; undone in the finally block.
        self.sessionState.set("username", "admin")
        out = self.response.getPrintWriter("text/plain; charset=UTF-8")
        relationshipMapper = ApplicationContextProvider.getApplicationContext().getBean("relationshipMapper")
        externalCurationMessageBuilder = ApplicationContextProvider.getApplicationContext().getBean("externalCurationMessageBuilder")
        oid = self.request.getParameter("oid")
        if oid is None:
            # No oid supplied: resolve it from an alternate identifier.
            identifier = self.request.getParameter("identifier")
            oid = self.findOidByIdentifier(identifier)
        relationshipType = self.request.getParameter("relationship")
        curatedPid = self.request.getParameter("curatedPid")
        sourceId = self.request.getParameter("sourceIdentifier")
        system = self.request.getParameter("system")
        digitalObject = StorageUtils.getDigitalObject(self.storage, oid)
        metadataJsonPayload = digitalObject.getPayload("metadata.json")
        metadataJsonInstream = metadataJsonPayload.open()
        metadataJson = JsonSimple(metadataJsonInstream)
        metadataJsonPayload.close()
        relationships = metadataJson.getArray("relationships")
        found = False
        # Mark every relationship matching the source identifier as curated.
        for relationship in relationships:
            if relationship.get("identifier") == sourceId:
                relationship.put("isCurated",True)
                relationship.put("curatedPid",curatedPid)
                found = True
        if not found:
            # No existing entry: record a brand-new curated relationship.
            relationship = JsonObject()
            relationship.put("isCurated",True)
            relationship.put("curatedPid",curatedPid)
            relationship.put("relationship",relationshipType)
            relationship.put("identifier",sourceId)
            relationship.put("system",system)
            relationships.add(relationship)
        out.println(metadataJson.toString(True))
        # Persist the updated JSON back into the metadata.json payload.
        istream = ByteArrayInputStream(String(metadataJson.toString(True)).getBytes())
        StorageUtils.createOrUpdatePayload(digitalObject,"metadata.json",istream)
        out.close()
    finally:
        self.sessionState.remove("username")
def __searchExecute(self, search, count): try: search.setParam("start", str(count)) out = ByteArrayOutputStream() self.services.indexer.search(search, out) return SolrResult(ByteArrayInputStream(out.toByteArray())) except Exception, e: self.log.error("Error during search: ", e) return None
def parseFileContent(self, bytes):
    """Parse the CSV content in *bytes* and return the parsed rows."""
    # Decode the raw bytes with the configured character encoding.
    reader = InputStreamReader(ByteArrayInputStream(bytes), self.encoding)
    # Configure the parser from this instance's settings.
    parser = csvParser.Parser(reader, self.delimiter)
    parser.setQuoteSymbol(self.quoteSymbol)
    parser.setRowToStartIndex(self.rowToStartIndex)
    processed = csvParser.Processor().processByParser(parser)
    return processed.getRows()
def _addPropertyValueToTFMeta(self, object, tfMetaPropertyValue):
    """Record *tfMetaPropertyValue* in the object's TF-OBJ-META payload
    and flag the object for re-rendering."""
    props = object.getMetadata()
    props.setProperty("copyTFPackage", tfMetaPropertyValue)
    props.setProperty("render-pending", "true")
    # Serialise the properties and write them back as the payload.
    buf = ByteArrayOutputStream()
    props.store(buf, None)
    StorageUtils.createOrUpdatePayload(
        object, "TF-OBJ-META", ByteArrayInputStream(buf.toByteArray()))
def _getXmlRootFromString(self, xmlString):
    """ Parses string xml representation and returns root element
    str->Element
    @raise JavaException: XML parsing failed
    """
    # Strip per-line whitespace and drop empty lines before parsing.
    lines = [line.strip() for line in xmlString.split('\n') if line]
    compacted = String(''.join(lines))
    stream = ByteArrayInputStream(compacted.getBytes('utf-8'))
    return SAXBuilder().build(stream).getRootElement()
def from_json(json_object):
    """Reconstruct a Bukkit ItemStack from its hex-serialised JSON form.

    Objects without the ItemStack marker key are returned untouched.
    """
    if ITEMSTACK_JSON_NAME not in json_object:
        return json_object
    # Hex string -> raw bytes -> deserialised Bukkit object.
    raw_bytes = binascii.unhexlify(json_object[ITEMSTACK_JSON_NAME])
    reader = BukkitObjectInputStream(ByteArrayInputStream(raw_bytes))
    item = reader.readObject()
    reader.close()
    return item
def test_serialization(self):
    """Round-trip a Python set through Java object serialisation."""
    original = set(range(5, 10))
    # Serialise the set into an in-memory byte buffer.
    sink = ByteArrayOutputStream()
    writer = ObjectOutputStream(sink)
    writer.writeObject(original)
    writer.close()
    # Deserialise and compare against the original value.
    reader = ObjectInputStream(ByteArrayInputStream(sink.toByteArray()))
    self.assertEqual(original, reader.readObject())
def getAttachments(self):
    """Return the Solr documents for review attachments of this object."""
    query = "attached_to:%s AND attachment_type:%s" % (
        self.oid, "review-attachments")
    req = SearchRequest(query)
    req.setParam("rows", "1000")
    raw = ByteArrayOutputStream()
    self.Services.indexer.search(req, raw)
    return SolrResult(ByteArrayInputStream(raw.toByteArray())).getResults()
def serialize(o, special=False):
    """Round-trip *o* through Java serialisation and return the copy.

    With special=True, deserialisation uses PythonObjectInputStream
    (needed for Python objects) instead of the plain ObjectInputStream.
    """
    sink = ByteArrayOutputStream()
    ObjectOutputStream(sink).writeObject(o)
    reader_cls = PythonObjectInputStream if special else ObjectInputStream
    reader = reader_cls(ByteArrayInputStream(sink.toByteArray()))
    return reader.readObject()
def findPackagesToTransition(self, fromWorkflowId, fromWorkflowStage):
    """Find owned, security-filtered packages on the given workflow
    id and stage; returns the matching Solr docs."""
    query = ("workflow_id:" + fromWorkflowId +
             " AND _query_:\"workflow_step:" + fromWorkflowStage + "\"")
    req = SearchRequest(query)
    # NOTE(review): repeated setParam("fq", ...) may overwrite the prior
    # value — other code uses addParam for multiple fq clauses; confirm.
    req.setParam("fq", "owner:[* TO *]")
    req.setParam("fq", "security_filter:[* TO *]")
    raw = ByteArrayOutputStream()
    self.indexer.search(req, raw)
    return SolrResult(ByteArrayInputStream(raw.toByteArray())).getResults()
def findPackagesToPurge(self, packageType):
    """Find packages of *packageType* created more than seven days ago;
    returns the matching Solr docs (storage id and date fields only)."""
    query = ("display_type:" + packageType +
             " AND date_object_created:[* TO NOW-7DAY]")
    req = SearchRequest(query)
    # NOTE(review): repeated setParam("fq", ...) may overwrite the prior
    # value — other code uses addParam for multiple fq clauses; confirm.
    req.setParam("fq", "owner:[* TO *]")
    req.setParam("fq", "security_filter:[* TO *]")
    req.setParam("fl", "storage_id,date_object_created,date_object_modified")
    raw = ByteArrayOutputStream()
    self.indexer.search(req, raw)
    return SolrResult(ByteArrayInputStream(raw.toByteArray())).getResults()
def __loadSolrData(self, oid):
    """Fetch the Solr record for *oid*, scoped to the current portal."""
    portal = self.vc("page").getPortal()
    query = 'id:"%s"' % oid
    searchQuery = portal.getSearchQuery()
    if searchQuery:
        query += " AND " + searchQuery
    req = SearchRequest(query)
    req.addParam("fq", 'item_type:"object"')
    req.addParam("fq", portal.getQuery())
    raw = ByteArrayOutputStream()
    self.vc("Services").getIndexer().search(req, raw)
    return SolrResult(ByteArrayInputStream(raw.toByteArray()))
def _buildDocumentForXpath(content, namespaceAware=1):
    r'@types: str, int -> org.w3c.dom.Document'
    # Build a parser with validation and DTD loading switched off so
    # parsing never touches a DTD or the network.
    factory = DocumentBuilderFactory.newInstance()
    factory.setNamespaceAware(namespaceAware)
    factory.setValidating(0)
    for feature in (
            "http://xml.org/sax/features/namespaces",
            "http://xml.org/sax/features/validation",
            "http://apache.org/xml/features/nonvalidating/load-dtd-grammar",
            "http://apache.org/xml/features/nonvalidating/load-external-dtd"):
        factory.setFeature(feature, 0)
    data = ByteArrayInputStream(String(content).getBytes())
    return factory.newDocumentBuilder().parse(data)
def _find_locations(self):
    """Build self._location_map (qualified name -> tracked method
    locations) from the parsed sources; no-op if already built."""
    if self._location_map is not None:
        return
    tracker = Tracker()
    for source in self._source_bytes:
        compilation_unit = JavaParser.parse(ByteArrayInputStream(source))
        compilation_unit.accept(tracker, None)
    self._location_map = {}
    for method in tracker.trackedMethods:
        self._location_map.setdefault(method.qualifiedName, []).append(method)
def __search(self, searchField):
    """Page through every object matching *searchField* (within the
    portal and security constraints) and return their storage ids.
    """
    indexer = self.services.getIndexer()
    portalQuery = self.services.getPortalManager().get(
        self.portal.getName()).getQuery()
    portalSearchQuery = self.services.getPortalManager().get(
        self.portal.getName()).getSearchQuery()
    # Security prep work
    current_user = self.page.authentication.get_username()
    security_roles = self.page.authentication.get_roles_list()
    security_filter = 'security_filter:("' + '" OR "'.join(
        security_roles) + '")'
    security_exceptions = 'security_exception:"' + current_user + '"'
    owner_query = 'owner:"' + current_user + '"'
    security_query = "(" + security_filter + ") OR (" + \
        security_exceptions + ") OR (" + owner_query + ")"
    startRow = 0
    numPerPage = 25
    numFound = 0
    req = SearchRequest(searchField)
    if portalQuery:
        req.addParam("fq", portalQuery)
    if portalSearchQuery:
        req.addParam("fq", portalSearchQuery)
    if not self.page.authentication.is_admin():
        req.addParam("fq", security_query)
    # FIX: constant parameters are set once, before the paging loop; the
    # original re-added them (and 'start') with addParam on every
    # iteration, accumulating duplicate values on the request.
    req.addParam("fq", 'item_type:"object"')
    req.setParam("rows", str(numPerPage))
    objectIdList = []
    while True:
        req.setParam("start", str(startRow))
        out = ByteArrayOutputStream()
        indexer.search(req, out)
        result = JsonSimpleConfig(ByteArrayInputStream(out.toByteArray()))
        docs = result.getJsonList("response", "docs")
        docIds = []
        for doc in docs:
            docId = doc.getString(None, "storage_id")
            if docId is not None:
                docIds.append(docId)
        # FIX: collect the extracted storage ids; the original extended
        # with the raw doc objects, leaving docIds unused.
        objectIdList.extend(docIds)
        startRow += numPerPage
        numFound = int(result.getString(None, "response", "numFound"))
        if (startRow > numFound):
            break
    return objectIdList
def __change(self, context, oid, new_owner):
    """Transfer ownership of object *oid* to *new_owner*.

    Updates the stored object's metadata, adjusts viewer access through
    the configured authentication plugin, then re-indexes the record.
    Returns a small JSON status string on success; returns None (after
    logging) on failure.
    """
    storage = self.services.getStorage()
    object = storage.getObject(oid)
    objectMetadata = object.getMetadata()
    owner = objectMetadata.getProperty("owner")
    objectMetadata.setProperty("owner", new_owner)
    self.log.debug("grantAccess.py: Changed ownership from {} to {} ",
                   owner, new_owner)
    # Persist the updated properties back into the TF-OBJ-META payload.
    output = ByteArrayOutputStream()
    objectMetadata.store(output, None)
    input = ByteArrayInputStream(output.toByteArray())
    StorageUtils.createOrUpdatePayload(object, "TF-OBJ-META", input)
    try:
        auth = context["page"].authentication
        source = context["formData"].get("source")
        self.log.debug("grantAccess.py: authentication plugin: = {}", source)
        auth.set_access_plugin(source)
        # special condition when setting admin as owner - revoke all viewers
        if new_owner == "admin":
            viewers = self.getViewers(oid, owner)
            self.log.debug(
                "grantAccess.py: New owner is admin, revoking all viewers")
            self.log.debug("grantAccess.py: Viewers: " + viewers.toString())
            for viewer in viewers:
                self.log.debug("Revoking:%s" % viewer)
                auth.revoke_user_access(oid, viewer)
            # when there are viewers, the previous owner somehow joins the read-only list, revoke access to the previous owner as well.
            if viewers.size() > 0:
                auth.revoke_user_access(oid, owner)
        else:
            self.log.info(
                "Grant previous owner {} view access by adding them to security_execption.",
                owner)
            auth.grant_user_access(
                oid, owner)  # give previous owner read access
        err = auth.get_error()
        # The 'Duplicate!' message is treated as success: granting access
        # to an already-authorised user is not an error here.
        if err is None or err == 'Duplicate! That user has already been applied to this record.':
            # NOTE(review): uses the global 'Services' here, while the code
            # above uses self.services — confirm both resolve to the same
            # service registry.
            Services.indexer.index(oid)
            Services.indexer.commit()
            return '{"status":"ok", "new_owner": "' + new_owner + '"}'
        else:
            self.log.error(
                "grantAccess.py: Error raised during calling authentication for changing ownership. Exception: "
                + err)
    except Exception, e:
        self.log.error(
            "grantAccess.py: Unexpected error raised during changing ownership of data. Exception: "
            + str(e))
def downloadFile(context, main, add, filterinfo, session, elementId, recordId): print 'Save xform data from Celesta Python procedure.' print 'User %s' % context.userId print 'main "%s".' % main print 'add "%s".' % add print 'filterinfo "%s".' % filterinfo print 'session "%s".' % session print 'elementId "%s".' % elementId print 'recordId "%s".' % recordId fileName = 'test.txt' data = String('grid data') return JythonDownloadResult(ByteArrayInputStream(data.getBytes()),fileName)
def __blob(self, obj=0):
    """Round-trip a serialised Java object through a BLOB column.

    With obj=1 the insert binds a Python file object; otherwise a Java
    FileInputStream is bound. The scratch table and temp file are always
    cleaned up in the finally block.
    """
    assert self.has_table("blobtable"), "no blob table"
    tabname, sql = self.table("blobtable")
    fn = tempfile.mktemp()
    fp = None
    c = self.cursor()
    try:
        hello = ("hello", ) * 1024
        c.execute(sql)
        self.db.commit()
        from java.io import FileOutputStream, FileInputStream, ObjectOutputStream, ObjectInputStream, ByteArrayInputStream
        # Serialise the tuple to a temp file, then read it straight back
        # to prove serialisation is sound before involving the database.
        fp = FileOutputStream(fn)
        oos = ObjectOutputStream(fp)
        oos.writeObject(hello)
        fp.close()
        fp = FileInputStream(fn)
        blob = ObjectInputStream(fp)
        value = blob.readObject()
        fp.close()
        assert hello == value, "unable to serialize properly"
        # Choose the stream type for the BLOB bind parameter.
        if obj == 1:
            fp = open(fn, "rb")
        else:
            fp = FileInputStream(fn)
        c.execute("insert into %s (a, b) values (?, ?)" % (tabname), [(0, fp)], {1: zxJDBC.BLOB})
        self.db.commit()
        # Fetch the BLOB back out and deserialise it for comparison.
        c.execute("select * from %s" % (tabname))
        f = c.fetchall()
        bytes = f[0][1]
        blob = ObjectInputStream(ByteArrayInputStream(bytes)).readObject()
        assert hello == blob, "blobs are not equal"
    finally:
        c.execute("drop table %s" % (tabname))
        c.close()
        self.db.commit()
        if os.path.exists(fn):
            if fp:
                fp.close()
            os.remove(fn)
def __getMetadata(self, oid):
    """Return the first Solr doc for *oid*.

    Assumes the session has already applied the security 'fq' filter.
    """
    req = SearchRequest('id:%s' % oid)
    req.setParam("fq", 'item_type:"object"')
    # Make sure 'fq' has already been set in the session
    ##security_roles = self.authentication.get_roles_list();
    ##security_query = 'security_filter:("' + '" OR "'.join(security_roles) + '")'
    ##req.addParam("fq", security_query)
    raw = ByteArrayOutputStream()
    self.services.getIndexer().search(req, raw)
    result = JsonConfigHelper(ByteArrayInputStream(raw.toByteArray()))
    #self.log.info("result={}", result.toString())
    return result.getJsonList("response/docs").get(0)
def __search(self): indexer = self.services.getIndexer() # Security prep work isAdmin = self.vc("page").authentication.is_admin() if not isAdmin: print "ERROR: User is not an admin '" return None req = SearchRequest('eventType:harvestStart') req.setParam("rows", "100") out = ByteArrayOutputStream() indexer.searchByIndex(req, out, "eventLog") self.__harvestList = SolrResult(ByteArrayInputStream( out.toByteArray()))
def _searchSets(self, indexer, searchType, isAdmin=True, security_query=''):
    """Search packages of *searchType*, newest modified first.

    Non-admin callers get *security_query* applied as an extra filter.
    """
    req = SearchRequest("packageType:" + searchType)
    req.setParam("fq", 'item_type:"object"')
    # NOTE(review): an empty "fq" value is added here — looks suspicious;
    # confirm the indexer ignores blank filter queries.
    req.addParam("fq", "")
    req.setParam("sort", "last_modified desc, f_dc_title asc")
    if not isAdmin:
        req.addParam("fq", security_query)
    buf = ByteArrayOutputStream()
    indexer.search(req, buf)
    return SolrResult(ByteArrayInputStream(buf.toByteArray()))
def __searchSolr(self):
    """Return all handle-bearing objects, faceted by repository fields."""
    req = SearchRequest("handle:http* AND item_type:object")
    params = [
        ("rows", "99999"),
        ("fl", "id,dc_title,handle,repository_type,repository_name"),
        ("sort", "handle asc"),
        ("facet", "true"),
        # NOTE(review): Solr normally expects one facet.field parameter
        # per field; confirm a comma-joined list is handled downstream.
        ("facet.field", "repository_type,repository_name"),
    ]
    for key, value in params:
        req.setParam(key, value)
    buf = ByteArrayOutputStream()
    self.services.indexer.search(req, buf)
    return SolrResult(ByteArrayInputStream(buf.toByteArray()))
def resolveEntity(self, name, publicId, baseURI, systemId): logger.debug("XMLExternalEntityResolver resolveEntity, name : ", name, ", publicId: ", publicId, ", baseURI: ", baseURI, ", systemId: ", systemId) try: filename = systemId logger.debug('resolveEntity, file name: ', filename, ", path: ", self.remotePath) strContent = String( self.fileMonitor.getFileContent(self.remotePath + self.fsSeparator + filename)) return InputSource(ByteArrayInputStream(strContent.getBytes())) except Exception, ex: logger.debug("XMLExternalEntityResolver Exception: ", ex)
def findOidByIdentifier(self, identifier): query = "known_ids:\"" + identifier + "\"" request = SearchRequest(query) out = ByteArrayOutputStream() # Now search and parse response result = None try: self.indexer.search(request, out) inputStream = ByteArrayInputStream(out.toByteArray()) result = SolrResult(inputStream) except Exception, ex: self.log.error("Error searching Solr: ", ex) raise ex return None
def _searchSets(self, startPage=1):
    """Run the paged record search for *startPage* and return the result
    with paging metadata ('lastPage'/'curPage') attached."""
    perPage = self.getRecordsPerPage()
    req = SearchRequest(self.getQuery())
    req.setParam("fq", 'item_type:"object"')
    req.setParam("rows", str(perPage))
    req.setParam("start", str((startPage - 1) * perPage))
    req.addParam("fq", self.getFilterQuery())
    req.setParam("fl", self.getReturnFields())
    req.setParam("sort", "date_object_modified desc, f_dc_title asc")
    if not self.isAdmin():
        req.addParam("fq", self.getSecurityQuery())
    buf = ByteArrayOutputStream()
    self.indexer.search(req, buf)
    result = SolrResult(ByteArrayInputStream(buf.toByteArray()))
    # Attach paging info so templates can render navigation.
    self._setPaging(result.getNumFound())
    result.getJsonObject().put("lastPage", str(self.paging.getLastPage()))
    result.getJsonObject().put("curPage", str(startPage))
    return result
def __getAuthorDetails(self, authorIds):
    """Look up the author records for every id in *authorIds*."""
    # Build one OR query across all the supplied author ids.
    req = SearchRequest('id:%s' % " OR id:".join(authorIds))
    req.setParam("fq", 'recordtype:"author"')
    req.addParam("fq", 'item_type:"object"')
    req.setParam("rows", "9999")
    # Security 'fq' filtering is assumed to be set in the session already.
    buf = ByteArrayOutputStream()
    self.services.getIndexer().search(req, buf)
    result = JsonConfigHelper(ByteArrayInputStream(buf.toByteArray()))
    return result.getJsonList("response/docs")
def loadXmlFile(self, path, container=None, fileContent=None):
    """Load and parse the XML file at *path*.

    str, osh, str -> Document
    If *fileContent* is not supplied it is fetched via self.fileMonitor.
    When *container* is given, a configuration-file OSH is also created
    and added to self.OSHVResult. Returns the parsed jdom Document, or
    None on any failure (failures are logged, never raised).
    """
    saxBuilder = SAXBuilder()
    globalSettings = GeneralSettingsConfigFile.getInstance()
    #loadExternalDTD = globalSettings.getPropertyBooleanValue('loadExternalDTD', 1)
    # External DTD loading is currently forced on (see the commented line
    # above for the configurable variant).
    loadExternalDTD = 1
    saxBuilder.setFeature(
        "http://apache.org/xml/features/nonvalidating/load-external-dtd",
        loadExternalDTD)
    logger.debug("loadXmlFile, loadExternalDTD: ", loadExternalDTD,
                 ", path: ", path)
    if loadExternalDTD:
        # Resolve external entities through the monitored remote host.
        saxBuilder.setEntityResolver(
            XMLExternalEntityResolver(self.fileMonitor, str(path),
                                      self.shellUtils))
        saxBuilder.setFeature(
            "http://xml.org/sax/features/use-entity-resolver2", 1)
    doc = None
    try:
        fileContent = fileContent or self.fileMonitor.getFileContent(path)
        if fileContent:
            try:
                strContent = String(fileContent)
                # Trim anything after the last '>' (trailing shell noise).
                strContent = String(
                    strContent.substring(0, strContent.lastIndexOf('>') + 1))
                doc = saxBuilder.build(
                    ByteArrayInputStream(strContent.getBytes()))
                if container is not None:
                    cfOSH = self.createCF(container, path, fileContent)
                    if cfOSH is not None:
                        self.OSHVResult.add(cfOSH)
            except:
                logger.debugException('Failed to load xml file:', path)
                excMsg = traceback.format_exc()
                logger.debug(excMsg)
    except:
        logger.debugException('Failed to get content of file:', path)
        excMsg = traceback.format_exc()
        logger.debug(excMsg)
    return doc
def __getFeed(self):
    """Query every object in the portal, faceted per the portal config."""
    portal = self.services.getPortalManager().get(self.portalId)
    req = SearchRequest("*:*")
    req.setParam("rows", "1000")
    req.setParam("sort", "f_dc_title asc")
    req.setParam("fq", 'item_type:"object"')
    # Faceting is driven entirely by the portal configuration.
    req.setParam("facet", "true")
    req.setParam("facet.field", portal.facetFieldList)
    req.setParam("facet.sort", "true")
    req.setParam("facet.limit", str(portal.facetCount))
    portalQuery = portal.getQuery()
    if portalQuery:
        req.addParam("fq", portalQuery)
    buf = ByteArrayOutputStream()
    self.services.getIndexer().search(req, buf)
    return JsonConfigHelper(ByteArrayInputStream(buf.toByteArray()))
def create(): """ A POST method that creates an Annotation (A-1) object based on a number of parameters. Required Parameters: source_uri: The URI for the whole target object dc_title: Dublin Core title associated with the annotation, i.e. "dublin core title goes here" body_inline Plain text string to store as the body OR body_content: Contents of the body (XML, text, json, etc.) AND body_mimetype: Mimetype of the body_content OR body_uri: URI pointing to the body of the annotation Optional Parameters: annotator: A string representing a user ID (0 or more) ie. 'Charly' generator: A string representing what generated the annotation ie. 'Web Client' oax_style_uri: A URI for a XSLT stylesheet used to render the whole target object. (0 or 1) oa_selector: A string with the selector value(0 or 1) oa_selector_type_uri: Required if an oa_selector is passed in ie. oa:Fragment fragment_type: URI describing the oa_selector type Optional and only used if an oa_selector is passed in. ie. 'http://www.w3.org/TR/xpath/' body_content_model: A string representing the body's content model ie. 'tei-annotation' Will create 1 or 2 Fedora objects. One will represent the actual annotation (A-1) and one will be the body of text that annotates the Fedora object (B-1). 
>>> import urllib >>> import urllib2 >>> post_url = "http://localhost:5000/create" >>> params = { "source_uri" : "test:1#xpointer('/foo')", "body_content" : "<TEI><body>text body</body></TEI>", "body_mimetype" : "text/xml", "dc_title" : "Open Annotation Collaboration Annotation object (A-1)" } >>> encoded_data = urllib.urlencode( params ) >>> request = urllib2.Request( post_url, encoded_data ) >>> response = urllib2.urlopen( request ) >>> print response.read() { "errors": [], "body_pid": "changeme:180", "annotation_pid": "changeme:181" } """ try: annote = Annotation(source_uri = request.form.get('source_uri'), dc_title = request.form.get('dc_title'), annotated = datetime.utcnow(), body_inline = request.form.get('body_inline', None), body_content = request.form.get('body_content', None), body_mimetype = request.form.get('body_mimetype', None), body_uri = request.form.get('body_uri', None), body_content_model = request.form.get('body_content_model', None), annotator = request.form.get('annotator', None), generator = request.form.get('generator', None), oax_style_uri = request.form.get('oax_style_uri', None), oa_selector = request.form.get('oa_selector', None), oa_selector_type_uri = request.form.get('oa_selector_type_uri', None), fragment_type = request.form.get('fragment_type', None)) annote.create() annote.submit() if annote.validate(): # Start dataset transaction dataset = TDBFactory.createDataset(app.config['STORE_LOCATION']) dataset.begin(ReadWrite.WRITE) try: model = dataset.getDefaultModel() model.begin() if annote.annotation_rdf is not None: anno_input_stream = ByteArrayInputStream(String(tostring(annote.annotation_rdf)).getBytes()) model.read(anno_input_stream, None) anno_input_stream.close() if annote.specific_target_rdf_element is not None: spectaget_input_stream = ByteArrayInputStream(String(tostring(annote.specific_target_rdf_element)).getBytes()) model.read(spectaget_input_stream, None) spectaget_input_stream.close() if annote.selector_rdf_element is 
not None: selector_input_stream = ByteArrayInputStream(String(tostring(annote.selector_rdf_element)).getBytes()) model.read(selector_input_stream, None) selector_input_stream.close() if annote.rels_ext_rdf_element is not None: relsext_input_stream = ByteArrayInputStream(String(tostring(annote.rels_ext_rdf_element)).getBytes()) model.read(relsext_input_stream, None) relsext_input_stream.close() if annote.body_inline_rdf_element is not None: body_inline_input_stream = ByteArrayInputStream(String(tostring(annote.body_inline_rdf_element)).getBytes()) model.read(body_inline_input_stream, None) body_inline_input_stream.close() model.commit() model.close() dataset.commit() except Exception, exc: raise finally: dataset.end() TDB.sync(dataset)
def __attachFile(self): try: # WebKit/IE prefixes C:\fakepath\ with javascript manipulated file inputs uploadFile = self.formData.get("uploadFile") uploadFile = uploadFile.replace("C:\\fakepath\\", "") fileDetails = self.vc("sessionState").get(uploadFile) # Establish that we do have details on the uploaded file if fileDetails is None: uploadFile = uploadFile.rsplit("\\", 1)[-1] fileDetails = self.vc("sessionState").get(uploadFile) if fileDetails is None: self.log.error("**** fileDetails is None!!! ***") return self.__toJson({ "error": "fileDetails is None (no upload file!)" }) self.log.debug("Attach Upload: fileDetails: '{}'", fileDetails) errorDetails = fileDetails.get("error") if errorDetails: self.log.error("ERROR: %s" % errorDetails) return self.__toJson({"error": errorDetails}) # Look for the storage info we need jsonFormData = JsonSimple(self.formData.get("json")) oid = jsonFormData.getString(None, "oid") fname = fileDetails.get("name") foid = fileDetails.get("oid") self.log.debug("attach oid='{}', filename='{}', foid='{}'", [oid, fname, foid]) # Make sure it was actually stored try: attachObj = self.Services.getStorage().getObject(foid) except StorageException, e: return JsonSimple({"error": "Attached file - '%s'" % str(e)}) # Build up some metadata to store alongside the file attachFormData = JsonSimple(self.formData.get("json", "{}")) attachMetadata = { "type": "attachment", "created_by": "workflow.py", "formData": { "oid": foid, "attached_to": oid, "filename": fname, "access_rights": attachFormData.getString("private", ["accessRights"]), "attachment_type": attachFormData.getString("supporting-material", ["attachmentType"]) } } # We are going to send an update on all attachments back with our response attachedFiles = self.__getAttachedFiles(oid) attachedFiles.append(dict(attachMetadata["formData"])) # Now store our metadata for this file try: jsonMetadata = self.__toJson(attachMetadata) jsonIn = ByteArrayInputStream(jsonMetadata.toString()) 
StorageUtils.createOrUpdatePayload(attachObj, "workflow.metadata", jsonIn) jsonIn.close(); attachObj.close(); except StorageException, e: self.log.error("Failed to create attachment metadata!", e)