def test_output_arg(self):
    """getBytes(srcBegin, srcEnd, dst, dstBegin) must fill a mutable byte
    array in place; the tuple input is immutable, so no assertion is made
    for it."""
    string = String('\u1156\u2278\u3390\u44AB')
    for btarray in ([0] * 4, (0,) * 4, jarray(jbyte)([0] * 4)):
        # This version of getBytes returns the 8 low-order bits of each
        # Unicode character.
        string.getBytes(0, 4, btarray, 0)
        if not isinstance(btarray, tuple):
            # assertEquals is a deprecated unittest alias; use assertEqual
            # (consistent with the other copy of this test in the project).
            self.assertEqual(
                btarray,
                [ctypes.c_int8(x).value for x in [0x56, 0x78, 0x90, 0xAB]])
def __call__(self):
    """One test run, executed repeatedly by each Grinder worker thread.

    Each worker performs grinder.runs runs; every run is one invocation
    of this method.
    """
    # Delay automatic result reporting so statistics can be adjusted
    # after the test completes (affects this worker thread only).
    grinder.statistics.delayReports = 1

    message = te_message.Message(
        "1.0", "20111020153610", 200, None, "GPE-111", 111,
        "OPERATOR-111", "00000000001", None, request_msgbody)

    payload = String(message.encrypted_body)
    resp = request.POST(CONF_url, payload.getBytes(),
                        message.assemble_nvpairs())
    status = int(resp.getHeader("X-Response-Code"))

    # Verify response status; mark the run failed explicitly when the
    # service did not answer 200.
    if status != 200:
        print("------------------------------------ERROR: %d" % status)
        # Set success = 0 to mark the test as a failure.
        grinder.statistics.forLastTest.setSuccess(0)
    else:
        print("------------------------------------PASS!")
def __saveManifest(self, oid):
    """Serialise the manifest and write it (UTF-8) back into the source
    payload of the stored object identified by oid."""
    storedObject = self.services.getStorage().getObject(oid)
    payloadId = storedObject.getSourceId()
    data = String(self.__manifest.toString())
    stream = ByteArrayInputStream(data.getBytes("UTF-8"))
    storedObject.updatePayload(payloadId, stream)
    storedObject.close()
def __formData(self):
    """Locate, version-check, back up and (if needed) upgrade the
    workflow form-data payload of the current object.

    Flow: find the payload whose id ends with packagePidSuffix, parse it
    as JSON, compare its redbox:formVersion against the target version,
    run __upgrade (version mismatch) or __hotfix (same version), back up
    the original JSON, then write the modified JSON back. Returns early
    (logging the reason) on any storage/parsing problem or when no work
    is required.
    """
    # Find our workflow form data
    packagePid = None
    try:
        self.pidList = self.object.getPayloadIdList()
        for pid in self.pidList:
            if pid.endswith(self.packagePidSuffix):
                packagePid = pid
    except StorageException:
        self.log.error("Error accessing object PID list for object '{}' ", self.oid)
        return
    if packagePid is None:
        self.log.debug("Object '{}' has no form data", self.oid)
        return

    # Retrieve our form data
    workflowData = None
    try:
        payload = self.object.getPayload(packagePid)
        try:
            workflowData = JsonSimple(payload.open())
        except Exception:
            self.log.error("Error parsing JSON '{}'", packagePid)
        finally:
            # Always release the payload, even when parsing failed.
            payload.close()
    except StorageException:
        self.log.error("Error accessing '{}'", packagePid)
        return

    # Test our version data
    self.version = workflowData.getString("{NO VERSION}", ["redbox:formVersion"])
    # Snapshot of the unmodified JSON, kept for the backup below.
    oldData = String(workflowData.toString(True))
    if self.version != self.redboxVersion:
        self.log.info("OID '{}' requires an upgrade: '{}' => '{}'",
                      [self.oid, self.version, self.redboxVersion])
        # The version data is old, run our upgrade function to see if any
        # alterations are required. Most likely at least the version
        # number will change.
        newWorkflowData = self.__upgrade(workflowData)
    else:
        newWorkflowData = self.__hotfix(workflowData)
        if newWorkflowData is not None:
            self.log.debug("OID '{}' was hotfixed for v1.2 'dc:type' bug", self.oid)
        else:
            # Hotfix returned nothing to change; stop here.
            self.log.debug("OID '{}' requires no work, skipping", self.oid)
            return

    # Backup our data first — abort rather than risk data loss.
    backedUp = self.__backup(oldData)
    if not backedUp:
        self.log.error("Upgrade aborted, data backup failed!")
        return

    # Save the newly modified data
    jsonString = String(newWorkflowData.toString(True))
    inStream = ByteArrayInputStream(jsonString.getBytes("UTF-8"))
    try:
        self.object.updatePayload(packagePid, inStream)
    except StorageException, e:
        self.log.error("Error updating workflow payload: ", e)
def __call__(self):
    """A single Grinder run: build a message, POST it, and check the
    X-Response-Code header.

    Each worker thread calls this once per run, as configured by the
    grinder.runs property.
    """
    # Reporting is delayed so the statistics can be altered after the
    # test returns; this only affects the current worker thread.
    grinder.statistics.delayReports = 1

    msg = te_message.Message(
        "1.0", "20090927113634", 407, None, "GPE-111", 111,
        "OPERATOR-111", "00000000001", None, request_msgbody)
    print("******hello" + str(type(msg)))
    print("msg.raw_body" + msg.raw_body)
    print("des:" + msg.encrypted_body)

    encrypted = String(msg.encrypted_body)
    response = request.POST(CONF_url, encrypted.getBytes(),
                            msg.assemble_nvpairs())
    code = int(response.getHeader("X-Response-Code"))

    # Verify response status.
    if code != 200:
        print("------------------------------------ERROR: %d" % code)
        # success = 0 marks this run as a failure.
        grinder.statistics.forLastTest.setSuccess(0)
    else:
        print("------------------------------------PASS!")
def __activate__(self, context):
    """Entry point called with the indexing context; gathers metadata,
    caches it into the 'direct.index' payload, then sends messages.

    The payload write deliberately happens BEFORE __messages(), so the
    workflow messaging system can read the cached data before it reaches
    the index.
    """
    # Prepare variables
    self.index = context["fields"]
    self.object = context["object"]
    self.payload = context["payload"]
    self.params = context["params"]
    self.utils = context["pyUtils"]
    self.config = context["jsonConfig"]
    self.wfSecurityExceptions = None
    self.message_list = None
    # Because the workflow messaging system wants access to this data
    # BEFORE it actual hits the index we are going to cache it into an
    # object payload too.
    self.directIndex = JsonSimple()
    # Common data
    self.__newDoc()
    #print "+++ direct-files.py - itemType='%s'" % self.itemType
    # Real metadata — only gathered for 'object' items.
    if self.itemType == "object":
        self.__previews()
        self.__basicData()
        self.__metadata()
        # Update the 'direct.index' payload - BEFORE messages are sent
        directString = String(self.directIndex.toString())
        inStream = ByteArrayInputStream(directString.getBytes("UTF-8"))
        try:
            StorageUtils.createOrUpdatePayload(self.object, "direct.index", inStream)
        except StorageException, e:
            # Best effort: a failed payload update is logged, not fatal.
            print " * direct-files.py : Error updating direct payload"
        self.__messages()
        self.__displayType()
def test_store_creation(self):
    """Create a TDB store on disk, load six RDF triples from an
    in-memory stream, verify the count, then remove the store."""
    from com.hp.hpl.jena.tdb import TDBFactory
    from java.io import ByteArrayInputStream
    from java.lang import String

    store_path = os.path.join(
        os.path.dirname(os.path.abspath(__file__)), 'OAC-testing.tdb')
    dataset = TDBFactory.createDataset(store_path)
    # Make sure the store was created
    assert os.path.isdir(store_path)

    # Make InputStream triples
    rdf_text = '<rdf:RDF xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#"><rdf:Description rdf:about="info:fedora/changeme:651"><rdf:type rdf:resource="oa:Annotation"></rdf:type><oa:hasBody xmlns:oa="http://www.w3.org/ns/openannotation/core/" rdf:resource="info:fedora/changeme:650"></oa:hasBody><oa:modelVersion xmlns:oa="http://www.w3.org/ns/openannotation/core/" rdf:resource="http://www.openannotation.org/spec/core/20120509.html"></oa:modelVersion><oa:generated xmlns:oa="http://www.w3.org/ns/openannotation/core/">2012-06-07T03:50:55.993000Z</oa:generated></rdf:Description><rdf:Description rdf:about="info:fedora/changeme:650"><rdf:type rdf:resource="oa:Body"></rdf:type><dc:format xmlns:dc="http://purl.org/dc/elements/1.1/">text/xml</dc:format></rdf:Description></rdf:RDF>'
    triples_stream = ByteArrayInputStream(String(rdf_text).getBytes())

    model = dataset.getDefaultModel()
    model.begin()
    model.read(triples_stream, None)
    model.commit()
    model.close()
    # Were all of the triples added?
    assert model.size() == 6

    shutil.rmtree(store_path)
    # Was the store removed?
    assert not os.path.isdir(store_path)
def __saveManifest(self, oid):
    """Persist the current manifest into the source payload of the
    object identified by oid, encoded as UTF-8."""
    obj = self.services.getStorage().getObject(oid)
    manifestBytes = String(self.__manifest.toString()).getBytes("UTF-8")
    obj.updatePayload(obj.getSourceId(),
                      ByteArrayInputStream(manifestBytes))
    obj.close()
def test_output_arg(self):
    """Check getBytes with an output array argument, then the array
    sort helpers with lists of several lengths."""
    string = String('\u1156\u2278\u3390\u44AB')
    for target in ([0] * 4, (0,) * 4, jarray(jbyte)([0] * 4)):
        # This version of getBytes returns the 8 low-order bits of
        # each Unicode character.
        string.getBytes(0, 4, target, 0)
        if not isinstance(target, tuple):
            expected = [ctypes.c_int8(x).value
                        for x in [0x56, 0x78, 0x90, 0xAB]]
            self.assertEqual(target, expected)
    for method in ["arraySort", "arraySortObject"]:
        for seq in [[], [42], [5, 7, 2, 11, 3]]:
            with self.subTest(method=method, input=seq):
                data = seq.copy()
                getattr(TA, method)(data)
                # In-place sort must match Python's sorted().
                self.assertEqual(sorted(seq), data)
def _getXmlRootFromString(self, xmlString):
    """Parse an XML string and return the document's root element.

    str -> Element
    @raise JavaException: XML parsing failed
    """
    # Collapse the document onto one line: drop empty lines and strip
    # surrounding whitespace from the rest.
    pieces = []
    for line in xmlString.split('\n'):
        if line:
            pieces.append(line.strip())
    compact = String(''.join(pieces))
    stream = ByteArrayInputStream(compact.getBytes('utf-8'))
    return SAXBuilder().build(stream).getRootElement()
def __saveWorkflowMetadata(self, oid):
    """Write the workflow metadata payload for oid, then reindex and
    commit so the change becomes visible."""
    obj = self.services.getStorage().getObject(oid)
    jsonBytes = String(self.__workflowMetadata.toString()).getBytes("UTF-8")
    obj.updatePayload("workflow.metadata", ByteArrayInputStream(jsonBytes))
    obj.close()
    self.__indexer.index(oid)
    self.__indexer.commit()
def resolveEntity(self, name, publicId, baseURI, systemId): logger.debug("XMLExternalEntityResolver resolveEntity, name : ", name, ", publicId: ", publicId, ", baseURI: ", baseURI, ", systemId: ", systemId ) try: filename = systemId logger.debug('resolveEntity, file name: ', filename, ", path: ", self.remotePath) strContent = String( self.fileMonitor.getFileContent(self.remotePath + self.fsSeparator + filename ) ) return InputSource( ByteArrayInputStream( strContent.getBytes() ) ) except Exception, ex: logger.debug("XMLExternalEntityResolver Exception: ", ex )
def encrypt(self, pystrPlaintext):
    """Encrypt a Python string with PBEWithMD5AndDES and return the
    Base64-encoded ciphertext.

    The secret key is derived from self.PASSWORD; the cipher is salted
    with self.SALT over 20 iterations.
    """
    # The original wrapped this body in `try: ... except: raise`, which
    # is a no-op (a bare re-raise of everything); removed.
    plaintext = JavaString(pystrPlaintext)
    keyFactory = SecretKeyFactory.getInstance("PBEWithMD5AndDES")
    key = keyFactory.generateSecret(PBEKeySpec(self.PASSWORD))
    pbeCipher = Cipher.getInstance("PBEWithMD5AndDES")
    paramSpec = PBEParameterSpec(self.SALT, 20)
    pbeCipher.init(Cipher.ENCRYPT_MODE, key, paramSpec)
    return self._base64Encode(pbeCipher.doFinal(plaintext.getBytes()))
def downloadFile(context, main, add, filterinfo, session, elementId, recordId): print 'Save xform data from Celesta Python procedure.' print 'User %s' % context.userId print 'main "%s".' % main print 'add "%s".' % add print 'filterinfo "%s".' % filterinfo print 'session "%s".' % session print 'elementId "%s".' % elementId print 'recordId "%s".' % recordId fileName = 'test.txt' data = String('grid data') return JythonDownloadResult(ByteArrayInputStream(data.getBytes()),fileName)
def resolveEntity(self, name, publicId, baseURI, systemId): logger.debug("XMLExternalEntityResolver resolveEntity, name : ", name, ", publicId: ", publicId, ", baseURI: ", baseURI, ", systemId: ", systemId) try: filename = systemId logger.debug('resolveEntity, file name: ', filename, ", path: ", self.remotePath) strContent = String( self.fileMonitor.getFileContent(self.remotePath + self.fsSeparator + filename)) return InputSource(ByteArrayInputStream(strContent.getBytes())) except Exception, ex: logger.debug("XMLExternalEntityResolver Exception: ", ex)
def loadXmlFile(self, path, container=None, fileContent=None):
    'str, osh, str -> Document'
    # Parses the file at `path` (or the supplied fileContent) into a
    # JDOM Document, optionally recording a configuration-file OSH for
    # `container`. Returns None on any failure (errors are logged).
    saxBuilder = SAXBuilder()
    globalSettings = GeneralSettingsConfigFile.getInstance()
    #loadExternalDTD = globalSettings.getPropertyBooleanValue('loadExternalDTD', 1)
    # External-DTD loading is hard-wired on here instead of read from
    # the global settings (see commented-out line above).
    loadExternalDTD = 1
    saxBuilder.setFeature(
        "http://apache.org/xml/features/nonvalidating/load-external-dtd",
        loadExternalDTD)
    logger.debug("loadXmlFile, loadExternalDTD: ", loadExternalDTD,
                 ", path: ", path)
    if loadExternalDTD:
        # Resolve external entities (DTDs) through the remote file system.
        saxBuilder.setEntityResolver(
            XMLExternalEntityResolver(self.fileMonitor, str(path),
                                      self.shellUtils))
        saxBuilder.setFeature(
            "http://xml.org/sax/features/use-entity-resolver2", 1)
    doc = None
    try:
        fileContent = fileContent or self.fileMonitor.getFileContent(path)
        if fileContent:
            try:
                strContent = String(fileContent)
                # Cut everything after the last '>' so trailing non-XML
                # content cannot break the parse.
                strContent = String(
                    strContent.substring(0, strContent.lastIndexOf('>') + 1))
                doc = saxBuilder.build(
                    ByteArrayInputStream(strContent.getBytes()))
                if container is not None:
                    cfOSH = self.createCF(container, path, fileContent)
                    if cfOSH is not None:
                        self.OSHVResult.add(cfOSH)
            except:
                logger.debugException('Failed to load xml file:', path)
                excMsg = traceback.format_exc()
                logger.debug(excMsg)
    except:
        logger.debugException('Failed to get content of file:', path)
        excMsg = traceback.format_exc()
        logger.debug(excMsg)
    return doc
def __readWsdlWithIBMFactory(wsdlData, importWsdlDocuments=1):
    """Parse WSDL content with the IBM WSDL4J factory.

    wsdlData: the WSDL document as a string.
    importWsdlDocuments: when 1, nested wsdl:import documents are
        followed; any other value disables importing.
    Returns the parsed javax.wsdl Definition.
    @raise WSDLException: when wsdlData is None.
    """
    # `== None` replaced with the identity test; guard clause instead of
    # an else-wrapped body.
    if wsdlData is None:
        raise WSDLException('WSDL Content is Null')
    from com.ibm.wsdl.factory import WSDLFactoryImpl
    reader = WSDLFactoryImpl().newWSDLReader()
    if importWsdlDocuments == 1:
        reader.setFeature('javax.wsdl.importDocuments', Boolean.TRUE)
    else:
        reader.setFeature('javax.wsdl.importDocuments', Boolean.FALSE)
    arr = String(wsdlData.strip()).getBytes()
    inSrc = InputSource(ByteArrayInputStream(arr))
    # 'defintion' typo fixed to 'definition'.
    definition = reader.readWSDL(None, inSrc)
    return definition
def __readWsdlWithIBMFactory(wsdlData, importWsdlDocuments = 1):
    """Read a WSDL definition using the IBM WSDL4J reader.

    wsdlData: WSDL document text; importWsdlDocuments == 1 makes the
    reader follow nested wsdl:import documents.
    Returns a javax.wsdl Definition.
    @raise WSDLException: when wsdlData is None.
    """
    # Identity test instead of `== None`; early raise replaces the
    # else-wrapped body.
    if wsdlData is None:
        raise WSDLException('WSDL Content is Null')
    from com.ibm.wsdl.factory import WSDLFactoryImpl
    wsdlfactoryIdox = WSDLFactoryImpl()
    reader = wsdlfactoryIdox.newWSDLReader()
    if importWsdlDocuments == 1:
        reader.setFeature('javax.wsdl.importDocuments', Boolean.TRUE)
    else:
        reader.setFeature('javax.wsdl.importDocuments', Boolean.FALSE)
    wsdlData = String(wsdlData.strip())
    stream = ByteArrayInputStream(wsdlData.getBytes())
    inSrc = InputSource(stream)
    # 'defintion' typo fixed to 'definition'.
    definition = reader.readWSDL(None, inSrc)
    return definition
def parseNlbProps(output, resultVector, hostOSH, framework, ip):
    """Split NLB command output into cluster properties and port rules,
    build the cluster and software OSHs, and append them to
    resultVector."""
    # First line is the cluster name (also present in the properties,
    # so it is not used directly here).
    nameAndRest = output.split('\n', 1)
    sections = re.split(PROP_RULE_SEPARATOR, nameAndRest[1])
    # Drop the trailing statistics section from the port-rule text.
    ruleText = re.split('\\s\\sStatistics:', sections[2])[0]

    props = Properties()
    propBytes = String(sections[0]).getBytes('UTF-8')
    props.load(ByteArrayInputStream(propBytes))

    rulesConfigOsh = parsePortRules(ruleText)
    cluster = NlbClusterBuilder(props, rulesConfigOsh, hostOSH,
                                framework, ip)
    cluster.addOshToVector(resultVector)
    node = NlbSwBuilder(props, cluster.getNlbClusterOSH(), hostOSH,
                        framework)
    node.addOshToVector(resultVector)
def loadXmlFile(self, path, container = None, fileContent = None):
    'str, osh, str -> Document'
    # Builds a JDOM Document from the file at `path` (or from the given
    # fileContent), registering a configuration-file OSH when a
    # container OSH is supplied. Returns None on failure.
    saxBuilder = SAXBuilder()
    globalSettings = GeneralSettingsConfigFile.getInstance()
    # External-DTD loading is forced on here rather than read from the
    # global settings.
    loadExternalDTD = 1
    saxBuilder.setFeature(
        "http://apache.org/xml/features/nonvalidating/load-external-dtd",
        loadExternalDTD)
    logger.debug("loadXmlFile, loadExternalDTD: ", loadExternalDTD,
                 ", path: ", path)
    if loadExternalDTD:
        resolver = XMLExternalEntityResolver(self.fileMonitor, str(path),
                                             self.shellUtils)
        saxBuilder.setEntityResolver(resolver)
        saxBuilder.setFeature(
            "http://xml.org/sax/features/use-entity-resolver2", 1)
    doc = None
    try:
        fileContent = fileContent or self.fileMonitor.getFileContent(path)
        if fileContent:
            try:
                # Trim anything after the final '>' before parsing.
                raw = String(fileContent)
                trimmed = String(raw.substring(0, raw.lastIndexOf('>') + 1))
                doc = saxBuilder.build(
                    ByteArrayInputStream(trimmed.getBytes()))
                if container is not None:
                    cfOSH = self.createCF(container, path, fileContent)
                    if cfOSH is not None:
                        self.OSHVResult.add(cfOSH)
            except:
                logger.debugException('Failed to load xml file:', path)
                excMsg = traceback.format_exc()
                logger.debug(excMsg)
    except:
        logger.debugException('Failed to get content of file:', path)
        excMsg = traceback.format_exc()
        logger.debug(excMsg)
    return doc
# NOTE(review): fragment of a larger function — the matching `try:` for
# this handler and the definitions of jsonConfig, object, rules,
# titleList and wfChanged lie outside this chunk. Indentation below is
# reconstructed; verify against the full source.
except StorageException, e:
    # No workflow payload, time to create
    wfChanged = True
    wfMeta = JsonConfigHelper()
    wfMeta.set("id", WORKFLOW_ID)
    wfMeta.set("step", "pending")
    # Pull visibility/security lists from the 'pending' stage config.
    stages = jsonConfig.getJsonList("stages")
    for stage in stages:
        if stage.get("name") == "pending":
            item_security = stage.getList("visibility")
            workflow_security = stage.getList("security")

# Has the workflow metadata changed?
if wfChanged == True:
    jsonString = String(wfMeta.toString())
    inStream = ByteArrayInputStream(jsonString.getBytes("UTF-8"))
    try:
        StorageUtils.createOrUpdatePayload(object, "workflow.metadata", inStream)
    except StorageException, e:
        # Best effort: log to stdout and continue indexing.
        print " * workflow-harvester.py : Error updating workflow payload"

# Index the workflow fields.
rules.add(AddField("workflow_id", wfMeta.get("id")))
rules.add(AddField("workflow_step", wfMeta.get("step")))
for group in workflow_security:
    rules.add(AddField("workflow_security", group))

# some defaults if the above failed
if titleList == []:
    #use object's source id (i.e. most likely a filename)
    titleList.append(object.getSourceId())
def str2bytes(strIn, encode="US-ASCII"):
    """Encode a Python string into a Java byte array using the given
    charset name (US-ASCII by default)."""
    # Route through java.lang.String so the JVM performs the encoding.
    return String(strIn).getBytes(encode)
class JwtData():
    """Handles a RapidAAF SSO JWT assertion POSTed to this page:
    verifies the HMAC signature and the aud/iss/nbf/exp/jti claims,
    stores the verified payload in the session, then redirects."""

    def __init__(self):
        pass

    def __activate__(self, context):
        # Pull collaborators out of the velocity context.
        self.velocityContext = context
        self.log = self.vc("log")
        self.systemConfig = self.vc("systemConfig")
        self.formData = context["formData"]
        self.assertionText = self.formData.get("assertion")
        self.session = self.vc("sessionState")
        self.response = self.vc("response")
        self.request = self.vc("request")
        method = self.request.getMethod()
        #checking access method
        if method != "POST":
            self.log.error("Page not accessed by a POST, method:%s" % method)
            return
        # Expected key and claim values come from system configuration.
        self.sharedKey = String(self.systemConfig.getString("", "rapidAafSso", "sharedKey"))
        self.aud = self.systemConfig.getString("", "rapidAafSso", "aud")
        self.iss = self.systemConfig.getString("", "rapidAafSso", "iss")
        #checking signature
        jwsObject = JWSObject.parse(self.assertionText)
        verifier = MACVerifier(self.sharedKey.getBytes())
        verifiedSignature = jwsObject.verify(verifier)
        if (verifiedSignature):
            self.log.debug("Verified JWS signature!")
        else:
            self.log.error("Invalid JWS signature!")
            return
        self.log.debug(jwsObject.getPayload().toString())
        self.log.debug(self.session.toString())
        # Extract the claims from the verified payload.
        json = JsonSimple(jwsObject.getPayload().toString())
        aud = json.getString("", "aud")
        iss = json.getString("", "iss")
        nbf = json.getInteger(None, "nbf")
        exp = json.getInteger(None, "exp")
        jti = json.getString("", "jti")
        #checking aud
        if self.aud != aud:
            self.log.error("Invalid aud: '%s' expecting: '%s'" % (aud, self.aud))
            return
        #checking iss
        if self.iss != iss:
            self.log.error("Invalid iss: '%s' expecting: '%s'" % (iss, self.iss))
            return
        #checking times — current time must fall within [nbf, exp].
        now = Date().getTime() / 1000
        if now < nbf or now > exp:
            self.log.error("Invalid timings.")
            return
        #checking jti — a jti already stored in the DB means this token
        #was seen before (replay protection).
        attributeDao = ApplicationContextProvider.getApplicationContext().getBean("hibernateAuthUserAttributeDao")
        params = HashMap()
        params.put("key", "jti")
        params.put("value", jti)
        attrList = attributeDao.query("getUserAttributeByKeyAndValue", params)
        if attrList.size() > 0:
            # NOTE(review): message rejoined onto one line (the source
            # chunk wrapped the literal mid-string).
            self.log.error("Possible replay attack, jti:'%s' found in DB." % jti)
            return
        # All checks passed — record the JWT details in the session.
        self.session.put("jwt", jwsObject.getPayload().toString())
        self.session.put("jwt_json", json)
        self.session.put("jwt_assertion", self.assertionText)
        self.session.put("jwt_exp", exp)
        self.returnAddress = self.session.get("returnAddress")
        if self.returnAddress is None:
            self.log.debug("No return address, using portalPath.")
            self.returnAddress = self.vc("portalPath")
        self.log.debug("RapidAAF SSO login complete, redirect to: %s" % self.returnAddress)
        self.response.sendRedirect(self.returnAddress)

    # Get from velocity context
    def vc(self, index):
        if self.velocityContext[index] is not None:
            return self.velocityContext[index]
        else:
            self.velocityContext["log"].error("ERROR: Requested context entry '{}' doesn't exist", index)
            return None
# NOTE(review): fragment of a larger method — wfMeta, wfChanged and
# workflow_security originate outside this chunk. Indentation below is
# reconstructed; verify against the full source.
wfMetaObj = wfMeta.getJsonObject()
wfMetaObj.put("id", WORKFLOW_ID)
wfMetaObj.put("step", "pending")
wfMetaObj.put("pageTitle", "Uploaded Files - Management")
# Copy label/visibility/security/message settings from the 'pending'
# stage of the workflow configuration.
stages = self.config.getJsonSimpleList(["stages"])
for stage in stages:
    if stage.getString(None, ["name"]) == "pending":
        wfMetaObj.put("label", stage.getString(None, ["label"]))
        self.item_security = stage.getStringList(["visibility"])
        workflow_security = stage.getStringList(["security"])
        self.message_list = stage.getStringList(["message"])

# Has the workflow metadata changed?
if wfChanged == True:
    jsonString = String(wfMeta.toString())
    inStream = ByteArrayInputStream(jsonString.getBytes("UTF-8"))
    try:
        StorageUtils.createOrUpdatePayload(self.object, "workflow.metadata", inStream)
    except StorageException, e:
        # Best effort: log to stdout and continue indexing.
        print " * workflow-harvester.py : Error updating workflow payload"

# Index the workflow fields.
self.utils.add(self.index, "workflow_id", wfMeta.getString(None, ["id"]))
self.utils.add(self.index, "workflow_step", wfMeta.getString(None, ["step"]))
self.utils.add(self.index, "workflow_step_label", wfMeta.getString(None, ["label"]))
for group in workflow_security:
    self.utils.add(self.index, "workflow_security", group)
def adler32(s, value=1):
    """Compute the Adler-32 checksum of s via java.util.zip.Adler32.

    Only the default start value of 1 is supported; any other value
    raises ValueError.
    """
    if value != 1:
        raise ValueError("adler32 only support start value of 1")
    digest = Adler32()
    digest.update(String.getBytes(s, 'iso-8859-1'))
    # Long(...).intValue() narrows the result to a signed 32-bit int.
    return Long(digest.getValue()).intValue()
def string_to_input_stream(self, inString):
    """Wrap a string in a ByteArrayInputStream over its UTF-8 bytes."""
    utf8Bytes = String(inString).getBytes("UTF-8")
    return ByteArrayInputStream(utf8Bytes)
def __saveManifest(self):
    """Serialise the manifest (toString(True) form) back into this
    object's source payload, encoded as UTF-8."""
    serialised = String(self.__manifest.toString(True))
    self.__object.updatePayload(
        self.__object.getSourceId(),
        ByteArrayInputStream(serialised.getBytes("UTF-8")))
# NOTE(review): fragment of a larger script — mbo, service, host and the
# later use of `entity`/`authHeader` lie outside this chunk.
# Indentation below is reconstructed; verify against the full source.
ntpipelineapiSet = mbo.getMboSet("NTPIPELINEAPI")
if ntpipelineapiSet.isEmpty():
    # Raise a Maximo service error when no pipeline API row exists.
    service.error('ntpipeline', 'pipelineMap', None) #Break
pipelineapi = ntpipelineapiSet.moveFirst()
# Build the OSLC query URI from the pipeline API row and this record's
# foreign key.
uri = host + '/os/'
uri = uri + pipelineapi.getString("INTOBJECTNAME")
uri = uri + '?lean=1&oslc.where=' #MAXDOMAINID='
uri = uri + pipelineapi.getString("UNIQUECOLUMN") + '='
uri = uri + str(mbo.getInt("FOREIGNKEY"))
uri = uri + '&oslc.select=*'
# NOTE(review): hard-coded credentials — consider moving to secure
# configuration.
clientid = "maxadmin"
clientsecurity = "maxadmin"
# get authentication header (Basic auth: base64 of "id:secret")
auth = clientid + ":" + clientsecurity
encodedAuth = String(Base64.encodeBase64(String.getBytes(auth, 'ISO-8859-1')), "UTF-8")
authHeader = str(encodedAuth)
# get http parameters
params = BasicHttpParams()
paramsBean = HttpProtocolParamBean(params)
paramsBean.setVersion(HttpVersion.HTTP_1_1)
paramsBean.setContentCharset("UTF-8")
paramsBean.setUseExpectContinue(True)
# get http body entities
formparams = ArrayList()
#formparams.add(BasicNameValuePair("username", username))
entity = UrlEncodedFormEntity(formparams, "UTF-8")