Пример #1
0
 def afterSwapHook(self):
     # Debug hook invoked after each swap pass: optionally collect ridge
     # edges into a named group, then dump the current mesh to disk.
     if self.ridges:
         self.liaison.getMesh().createRidgesGroup("ridges")
     # Each snapshot goes to a distinct "DEBUG<n>" directory so successive
     # dumps do not overwrite each other.
     MeshWriter.writeObject3D(self.liaison.getMesh(),
                              "DEBUG" + str(self.cnt), String())
     self.cnt += 1
Пример #2
0
def _buildDocumentForXpath(content, namespaceAware=1):
    r'@types: str, int -> org.w3c.dom.Document'
    # Parse the XML text into a DOM document suitable for XPath queries.
    factory = DocumentBuilderFactory.newInstance()
    factory.setNamespaceAware(namespaceAware)
    documentBuilder = factory.newDocumentBuilder()
    inputStream = ByteArrayInputStream(String(content).getBytes())
    return documentBuilder.parse(inputStream)
Пример #3
0
def remesh(**kwargs):
    """
    Remesh an existing mesh with a singular analytical metric

    Configuration is read from keyword arguments; most keys are accessed
    directly and must be present.  An optional "liaison" key reuses an
    existing MeshLiaison instead of reading the mesh from "in_dir".
    The result is written to kwargs["out_dir"].
    """
    # Process coplanarity options
    coplanarity = cos(kwargs["coplanarityAngle"] * pi / 180.0)
    if kwargs["coplanarity"]:
        # An explicit coplanarity value overrides the angle-derived one.
        coplanarity = kwargs["coplanarity"]

    # safe_coplanarity drives the cleanup passes; it is never below the
    # feature-edge coplanarity and defaults to 0.8 when unset.
    safe_coplanarity = kwargs["safe_coplanarity"]
    if safe_coplanarity is None:
        safe_coplanarity = 0.8
    safe_coplanarity = max(coplanarity, safe_coplanarity)

    # Build background mesh
    try:
        liaison = kwargs["liaison"]
    except KeyError:
        mtb = MeshTraitsBuilder.getDefault3D()
        if kwargs["recordFile"]:
            mtb.addTraceRecord()
        mtb.addNodeSet()
        mesh = Mesh(mtb)
        if kwargs["recordFile"]:
            # Keep the trace quiet while the initial mesh is loaded.
            mesh.getTrace().setDisabled(True)

        MeshReader.readObject3D(mesh, kwargs["in_dir"])
        liaison = MeshLiaison.create(mesh, mtb)

    if kwargs["recordFile"]:
        liaison.getMesh().getTrace().setDisabled(False)
        liaison.getMesh().getTrace().setLogFile(kwargs["recordFile"])
        liaison.getMesh().getTrace().createMesh("mesh", liaison.getMesh())
    if kwargs["immutable_border"]:
        liaison.mesh.tagFreeEdges(AbstractHalfEdge.IMMUTABLE)
    liaison.getMesh().buildRidges(coplanarity)
    if kwargs["preserveGroups"]:
        liaison.getMesh().buildGroupBoundaries()

    immutable_groups = []
    if kwargs["immutable_groups_file"]:
        # try/finally guarantees the handle is released even if read()
        # raises (the original leaked the file object on error).
        f = open(kwargs["immutable_groups_file"])
        try:
            immutable_groups = f.read().split()
        finally:
            f.close()
        liaison.mesh.tagGroups(immutable_groups, AbstractHalfEdge.IMMUTABLE)

    if kwargs["recordFile"]:
        # Consistency hooks evaluated while replaying the trace.
        cmds = [
            String("assert self.m.checkNoDegeneratedTriangles()"),
            String("assert self.m.checkNoInvertedTriangles()"),
            String("assert self.m.checkVertexLinks()"),
            String("assert self.m.isValid()"),
        ]
        liaison.getMesh().getTrace().setHooks(cmds)

    # Decimate
    if kwargs["decimateSize"] or kwargs["decimateTarget"]:
        decimateOptions = HashMap()
        if kwargs["decimateSize"]:
            decimateOptions.put("size", str(kwargs["decimateSize"]))
        elif kwargs["decimateTarget"]:
            decimateOptions.put("maxtriangles", str(kwargs["decimateTarget"]))
        decimateOptions.put("coplanarity", str(safe_coplanarity))
        QEMDecimateHalfEdge(liaison, decimateOptions).compute()
        swapOptions = HashMap()
        swapOptions.put("coplanarity", str(safe_coplanarity))
        SwapEdge(liaison, swapOptions).compute()

    # Metric
    if kwargs["rho"] > 1.0:
        # mixed metric
        metric = SingularMetric(kwargs["sizeinf"], kwargs["point_metric_file"],
                                kwargs["rho"], True)
    else:
        # analytic metric
        metric = SingularMetric(kwargs["sizeinf"], kwargs["point_metric_file"])

    # Remesh Skeleton
    if kwargs["skeleton"]:
        RemeshSkeleton(liaison, 1.66, metric, 0.01).compute()

    # Remesh
    refineOptions = HashMap()
    refineOptions.put("size", str(kwargs["sizeinf"]))
    refineOptions.put("coplanarity", str(safe_coplanarity))
    refineOptions.put("nearLengthRatio", str(kwargs["nearLengthRatio"]))
    refineOptions.put("project", "false")
    if kwargs["allowNearNodes"]:
        refineOptions.put("allowNearNodes", "true")
    refineAlgo = Remesh(liaison, refineOptions)
    refineAlgo.setAnalyticMetric(metric)
    refineAlgo.compute()

    if not kwargs["noclean"]:
        # Swap
        swapOptions = HashMap()
        swapOptions.put("coplanarity", str(safe_coplanarity))
        swapOptions.put("minCosAfterSwap", "0.3")
        SwapEdge(liaison, swapOptions).compute()

        # Improve valence
        valenceOptions = HashMap()
        valenceOptions.put("coplanarity", str(safe_coplanarity))
        valenceOptions.put("checkNormals", "false")
        ImproveVertexValence(liaison, valenceOptions).compute()

        # Smooth
        smoothOptions = HashMap()
        smoothOptions.put("iterations", str(8))
        smoothOptions.put("check", "true")
        smoothOptions.put("boundaries", "true")
        smoothOptions.put("relaxation", str(0.6))
        if safe_coplanarity >= 0.0:
            smoothOptions.put("coplanarity", str(safe_coplanarity))
        SmoothNodes3DBg(liaison, smoothOptions).compute()

        # Remove Degenerated
        rdOptions = HashMap()
        rdOptions.put("rho", str(kwargs["eratio"]))
        RemoveDegeneratedTriangles(liaison, rdOptions).compute()

    # remesh beams
    if kwargs["wire_size"] > 0.0:
        liaison = remesh_beams(
            liaison=liaison,
            size=kwargs["wire_size"],
            rho=kwargs["rho"],
            immutable_groups=immutable_groups,
            point_metric_file=kwargs["wire_metric_file"],
        )

    # Output
    MeshWriter.writeObject3D(liaison.getMesh(), kwargs["out_dir"], "")
    if kwargs["recordFile"]:
        liaison.getMesh().getTrace().finish()
Пример #4
0
 def characters(self, ch, start, length):
     # SAX callback: accumulate non-blank character data.
     text = str(String(ch, start, length)).strip()
     if not text:
         return
     self.chars += text
Пример #5
0
def __process_rcu_args(optional_arg_map, domain_type, domain_typedef):
    """
    Determine if the RCU is needed and validate/prompt for any missing information
    :param optional_arg_map: the optional arguments map
    :param domain_type:      the domain type
    :param domain_typedef:   the domain_typedef data structure
    :raises CLAException:    if an error occurs getting the passwords from the user or arguments are missing
    """
    _method_name = '__process_rcu_args'

    rcu_schema_count = len(domain_typedef.get_rcu_schemas())
    run_rcu = False
    if CommandLineArgUtil.RUN_RCU_SWITCH in optional_arg_map:
        run_rcu = optional_arg_map[CommandLineArgUtil.RUN_RCU_SWITCH]
        # The run-RCU switch was supplied but the domain type defines no RCU
        # schemas: log it and drop the switch, nothing to do.
        if rcu_schema_count == 0:
            __logger.info('WLSDPLY-12402', _program_name,
                          CommandLineArgUtil.RUN_RCU_SWITCH, domain_type)
            del optional_arg_map[CommandLineArgUtil.RUN_RCU_SWITCH]
            return

    if rcu_schema_count > 0:
        if CommandLineArgUtil.RCU_DB_SWITCH in optional_arg_map:
            if CommandLineArgUtil.RCU_PREFIX_SWITCH in optional_arg_map:
                # Prompt for the SYS password only when RCU will actually run
                # and it was not already supplied on the command line.
                if run_rcu and CommandLineArgUtil.RCU_SYS_PASS_SWITCH not in optional_arg_map:
                    try:
                        password = getcreds.getpass('WLSDPLY-12403')
                    except IOException, ioe:
                        ex = exception_helper.create_cla_exception(
                            'WLSDPLY-12404',
                            ioe.getLocalizedMessage(),
                            error=ioe)
                        ex.setExitCode(
                            CommandLineArgUtil.ARG_VALIDATION_ERROR_EXIT_CODE)
                        __logger.throwing(ex,
                                          class_name=_class_name,
                                          method_name=_method_name)
                        raise ex
                    optional_arg_map[CommandLineArgUtil.
                                     RCU_SYS_PASS_SWITCH] = String(password)
                # The schema password is always required when schemas exist.
                if CommandLineArgUtil.RCU_SCHEMA_PASS_SWITCH not in optional_arg_map:
                    try:
                        password = getcreds.getpass('WLSDPLY-12405')
                    except IOException, ioe:
                        ex = exception_helper.create_cla_exception(
                            'WLSDPLY-12406',
                            ioe.getLocalizedMessage(),
                            error=ioe)
                        ex.setExitCode(
                            CommandLineArgUtil.ARG_VALIDATION_ERROR_EXIT_CODE)
                        __logger.throwing(ex,
                                          class_name=_class_name,
                                          method_name=_method_name)
                        raise ex
                    optional_arg_map[CommandLineArgUtil.
                                     RCU_SCHEMA_PASS_SWITCH] = String(password)
            else:
                # RCU database given without a prefix: usage error.
                ex = exception_helper.create_cla_exception(
                    'WLSDPLY-12407', _program_name,
                    CommandLineArgUtil.RCU_DB_SWITCH,
                    CommandLineArgUtil.RCU_PREFIX_SWITCH)
                ex.setExitCode(CommandLineArgUtil.USAGE_ERROR_EXIT_CODE)
                __logger.throwing(ex,
                                  class_name=_class_name,
                                  method_name=_method_name)
                raise ex
Пример #6
0
 def test_with_class_type(self):
     # unic() of a java.lang.Class should contain the fully qualified name.
     class_repr = unic(String('').getClass())
     assert_true('java.lang.String' in class_repr)
Пример #7
0
 def __get_key_from_p12(self, file, password=None):
     # Load a PKCS#12 store from *file* and return its private key,
     # decrypting the store first when a password is supplied.
     # NOTE(review): the FileInputStream is never closed here — looks like a
     # descriptor leak; confirm whether PKCS12 takes ownership of the stream.
     io = FileInputStream(file)
     pkcs12 = PKCS12(io)
     if password is not None:
         pkcs12.decrypt(String(password).toCharArray())
     return pkcs12.getKey()
Пример #8
0
 def encodeProvider(self, name):
     # Wrap the provider name in a JSON object and base64url-encode its bytes.
     payload = json.dumps({"provider": name})
     return Base64Util.base64urlencode(String(payload).getBytes())
Пример #9
0
def mkprops():
    """Build the java.util.Properties used to configure the interpreter."""
    props = Properties()
    for propKey, propValue in (
            ("java.ext.dirs", String(JARDIR)),
            ("python.security.respectJavaAccessibility", String("true"))):
        props.setProperty(propKey, propValue)
    return props
Пример #10
0
#sweety.posture("neutral")
#sweety.mouthState("smile")
# Point the mouth service at the remote text-to-speech endpoint.
sweety.mouth.setGoogleURI("http://thehackettfamily.org/Voice_api/api2.php?voice=claire&txt=")

sweety.chatBot.startSession("ProgramAB", "default", "sweety")
######################################################################
# create the speech recognition service
######################################################################
pats = sweety.chatBot.listPatterns("chatBot")
# create the grammar for the speech recognition service
# (Sphinx takes a single "a | b | c" alternation string)
sphinx_grammar = "|".join(pats)
sweety.ear.startListening(sphinx_grammar)

######################################################################
# MRL Routing   sphinx -> program ab -> htmlfilter -> sweety
######################################################################
# add a route from Sphinx to ProgramAB
sweety.ear.addTextListener(sweety.chatBot)

# Add route from Program AB to html filter
sweety.chatBot.addTextListener(sweety.htmlFilter)
# Add route from html filter to mouth (delivered to the talk() callback)
sweety.htmlFilter.addListener("publishText", python.name, "talk", String().getClass());

# make sure the ear knows if it's speaking.
sweety.ear.attach(sweety.mouth)

def talk(data):
	#sweety.saying(data)
	sweety.mouth(data)
  	print "Saying :", data
Пример #11
0
	liaison.getMesh().getTrace().createMesh("mesh", liaison.getMesh())
if options.immutable_border:
	liaison.mesh.tagFreeEdges(AbstractHalfEdge.IMMUTABLE)
liaison.getMesh().buildRidges(coplanarity)
if options.preserveGroups:
	liaison.getMesh().buildGroupBoundaries()

# Groups listed in the file are tagged immutable so later passes skip them.
immutable_groups = []
if options.immutable_groups_file:
	f = open(options.immutable_groups_file)
	immutable_groups = f.read().split()
	f.close()
	liaison.mesh.tagGroups(immutable_groups, AbstractHalfEdge.IMMUTABLE)

# Consistency hooks evaluated while replaying the trace.
if options.recordFile:
	cmds = [ String("assert self.m.checkNoDegeneratedTriangles()"),
			 String("assert self.m.checkNoInvertedTriangles()"),
			 String("assert self.m.checkVertexLinks()"),
			 String("assert self.m.isValid()") ]
	liaison.getMesh().getTrace().setHooks(cmds)

## Decimate
if options.decimateSize or options.decimateTarget:
	decimateOptions = HashMap()
	if options.decimateSize:
		decimateOptions.put("size", str(options.decimateSize))
	elif options.decimateTarget:
		decimateOptions.put("maxtriangles", str(options.decimateTarget))
	decimateOptions.put("coplanarity", str(coplanarity))
	QEMDecimateHalfEdge(liaison, decimateOptions).compute()
	swapOptions = HashMap()
Пример #12
0
    type="float",
    dest="coplanarity",
    help=
    "minimum dot product of face normals when building feature edges (default 0.95)"
)

(options, args) = parser.parse_args(args=sys.argv[1:])

# Exactly two positional arguments are required: <input dir> <output dir>.
if len(args) != 2:
    parser.print_usage()
    sys.exit(1)

xmlDir = args[0]
outDir = args[1]

mtb = MeshTraitsBuilder.getDefault3D()
mesh = Mesh(mtb)
MeshReader.readObject3D(mesh, xmlDir)
liaison = MeshLiaison(mesh, mtb)

# Feature edges / group boundaries are only built when requested.
if options.coplanarity:
    liaison.getMesh().buildRidges(options.coplanarity)
if options.preserveGroups:
    liaison.getMesh().buildGroupBoundaries()

opts = HashMap()
if options.coplanarity:
    opts.put("coplanarity", str(options.coplanarity))
ImproveEdgeConnectivity(liaison, opts).compute()
MeshWriter.writeObject3D(liaison.getMesh(), outDir, String())
Пример #13
0
# Shared test fixtures: plain Python collections plus, when running on
# Jython (os.name == 'java'), their Java counterparts.
list1 = ['a']
list2 = ['a', 'b']
list3 = ['a', 'b', 'c']
dict0 = {}
dict1 = {'a':1}
dict2 = {'a':1, 'b':2}
dict3 = {'a':1, 'b':2, 'c':3}
custom0 = _Custom(0)
custom1 = _Custom(1)
custom2 = _Custom(2)
custom3 = _Custom(3)
length_method = _LengthMethod()
size_method = _SizeMethod()
length_attribute = _LengthAttribute()
if os.name == 'java':
    string0 = String()
    string1 = String('a')
    string2 = String('ab')
    string3 = String('abc')
    hashtable0 = Hashtable()
    hashtable1 = _create_hashtable(dict1)
    hashtable2 = _create_hashtable(dict2)
    hashtable3 = _create_hashtable(dict3)
    vector0 = Vector()
    vector1 = Vector(list1)
    vector2 = Vector(list2)
    vector3 = Vector(list3)
    # NOTE(review): list0 is used below but not defined in this chunk —
    # presumably defined earlier in the file; verify.
    array0 = jarray.array(list0, String)
    array1 = jarray.array(list1, String)
    array2 = jarray.array(list2, String)
    array3 = jarray.array(list3, String)
Пример #14
0
def decryptBytes(bytes):
    """Decrypt *bytes* with the domain's encryption service; return a str."""
    encryption_service = SerializedSystemIni.getEncryptionService(DOMAIN_DIR)
    clear_text = ClearOrEncryptedService(encryption_service).decryptBytes(bytes)
    return str(String(clear_text))
Пример #15
0
arduino.setSerialDevice(comPort, 57600, 8, 1, 0)
sleep(1)  # give it a second for the serial device to get ready

# update the gui with configuration changes
arduino.publishState()

# Test arduino: blink the LED once to confirm the connection works.
arduino.digitalWrite(ledPin, 1)
sleep(1)  # sleep one second
arduino.digitalWrite(ledPin, 0)
# start listening for the words we are interested in
ear.startListening("led on | led off | test")

# set up a message route from the ear --to--> python method "heard"
ear.addListener("recognized", python.name, "heard",
                String().getClass())

# this method is invoked when something is
# recognized by the ear - in this case we
# actuate the led state and print the word recognized


def heard():
    data = msg_ear_recognized.data[0]
    print "heard ", data
    if (data == "led on"):
        print "Turning on the light"
        arduino.digitalWrite(ledPin, 1)
        sleep(0.5)  # sleep half a second
        mouth.speak("The led is on ")
    elif (data == "led off"):
 def test_java_object(self):
     # type_name() should report the simple class name for Java objects
     # and 'javapackage' for package objects.
     cases = [(String(), 'String'), (String, 'Class'),
              (java.lang, 'javapackage'), (java, 'javapackage')]
     for item, expected in cases:
         assert_equal(type_name(item), expected)
Пример #17
0
    def __reportSearch(self):
        # Run the configured report query against the Solr indexer and keep
        # the results for the display template; when format == "csv", also
        # stream the results back as a CSV attachment.
        self.reportId = self.request.getParameter("id")
        self.format = self.request.getParameter("format")
        self.report = self.reportManager.getReports().get(self.reportId)
        self.reportQuery = self.report.getQueryAsString()
        self.log.debug("Report query: " + self.reportQuery)

        #Get a total number of records
        #(rows=0 so Solr returns only the hit count, no documents)
        try:
            out = ByteArrayOutputStream()
            recnumreq = SearchRequest(self.reportQuery)
            recnumreq.setParam("rows", "0")
            self.indexer.search(recnumreq, out)
            recnumres = SolrResult(ByteArrayInputStream(out.toByteArray()))
            self.__rowsFoundSolr = "%s" % recnumres.getNumFound()
        except:
            self.errorMsg = "Query failure. The issue has been logged (%s - %s)." % (
                sys.exc_info()[0], sys.exc_info()[1])
            self.log.error(
                "Reporting threw an exception (report was %s): %s - %s" %
                (self.report.getLabel(), sys.exc_info()[0], sys.exc_info()[1]))
            return

        #Setup the main query
        req = SearchRequest(self.reportQuery)
        req.setParam("fq", 'item_type:"object"')
        req.setParam("fq", 'workflow_id:"dataset"')
        req.setParam("rows", self.__rowsFoundSolr)
        try:
            #Now do the master search
            out = ByteArrayOutputStream()
            self.indexer.search(req, out)
            self.__reportResult = SolrResult(
                ByteArrayInputStream(out.toByteArray()))
            self.__checkResults()
        except:
            self.errorMsg = "Query failure. The issue has been logged (%s - %s)." % (
                sys.exc_info()[0], sys.exc_info()[1])
            self.log.error(
                "Reporting threw an exception (report was %s): %s - %s" %
                (self.report.getLabel(), sys.exc_info()[0], sys.exc_info()[1]))
            return

        #At this point the display template has enough to go with.
        #We just need to handle the CSV now
        if (self.format == "csv"):
            #Setup the main query - we need to requery to make sure we return
            #only the required fields. We'll use the specific IDs that met the
            #__checkResults check
            req = SearchRequest(self.reportQuery)
            req.setParam("fq", 'item_type:"object"')
            req.setParam("fq", 'workflow_id:"dataset"')
            req.setParam("rows", self.__rowsFoundSolr)
            req.setParam("csv.mv.separator", ";")

            #we need to get a list of the matching IDs from Solr
            #this doesn't work for long queries so it's abandoned
            #but left here commented to make sure we don't try it again
            #idQry = ""
            #for item in self.getProcessedResultsList():
            #    idQry += item.get("id") + " OR "
            #req.setParam("fq", 'id:(%s)' % idQry[:len(idQry)-4])

            #Create a list of IDs for reference when preparing the CSV
            idQryList = []
            for item in self.getProcessedResultsList():
                idQryList.append(item.get("id"))

            #Setup SOLR query with the required fields
            self.fields = self.systemConfig.getArray("redbox-reports",
                                                     "csv-output-fields")
            #We must have an ID field and it must be the first field
            fieldString = "id,"
            if self.fields is not None:
                for field in self.fields:
                    fieldString = fieldString + field.get("field-name") + ","
                fieldString = fieldString[:-1]

            req.setParam("fl", fieldString)

            out = ByteArrayOutputStream()
            try:
                self.indexer.search(req, out, self.format)
            except:
                #We can't get the result back from SOLR so fail back to the template display
                self.errorMsg = "Query failure. Failed to load the data - this issue has been logged (%s - %s)." % (
                    sys.exc_info()[0], sys.exc_info()[1])
                self.log.error(
                    "Reporting threw an exception (report was %s); Error: %s - %s"
                    % (self.report.getLabel(), sys.exc_info()[0],
                       sys.exc_info()[1]))
                return
            try:
                csvResponseString = String(out.toByteArray(), "utf-8")
                csvResponseLines = csvResponseString.split("\n")
            except:
                #We can't get the result back from SOLR so fail back to the template display
                self.errorMsg = "Query failure. Failed to prepare the CSV - this issue has been logged (%s - %s)." % (
                    sys.exc_info()[0], sys.exc_info()[1])
                self.log.error(
                    "Reporting threw an exception (report was %s); Error: %s - %s"
                    % (self.report.getLabel(), sys.exc_info()[0],
                       sys.exc_info()[1]))
                return

            fileName = self.urlEncode(self.report.getLabel())
            self.log.debug("Generating CSV report with file name: " + fileName)
            self.response.setHeader("Content-Disposition",
                                    "attachment; filename=%s.csv" % fileName)

            sw = StringWriter()
            parser = CSVParser()
            writer = CSVWriter(sw)
            count = 0

            #badRowFlag marks a row that failed to parse (usually an embedded
            #newline); the next iteration tries to repair it by joining it
            #with the following line.
            prevLine = ""
            badRowFlag = False

            for line in csvResponseLines:
                if badRowFlag:
                    #In this section of code we'll handle errors by either trying to fix the problem
                    #or by adding an error line in the CSV. We'll then move to the next row and keep going
                    try:
                        self.log.debug(
                            "Reporting - trying to append the previous line with the previous faulty one. Line appears as: %s"
                            % prevLine + line)
                        csvLine = parser.parseLine(prevLine + line)
                        badRowFlag = False
                        prevLine = ""
                        self.log.debug(
                            "Reporting - remedy appears to have worked. Line appears as: %s"
                            % prevLine + line)
                    except:
                        #We tried to rescue the file but failed on the second run so give up
                        writer.writeNext(
                            ["Failed to transfer record to CSV - check logs"])
                        self.log.error(
                            "Reporting threw an exception (report was %s); Error: %s - %s; Result line: %s"
                            % (self.report.getLabel(), sys.exc_info()[0],
                               sys.exc_info()[1], prevLine + line))
                else:
                    try:
                        csvLine = parser.parseLine(line)
                        badRowFlag = False
                        prevLine = ""
                    except:
                        #This can happen if there's a newline in the index data
                        #so we raise the badRowFlag and see if we can join this
                        #row to the next one to fix it
                        self.log.debug(
                            "Reporting threw an exception but I'll see if it's just a formatting issue (report was %s); Error: %s - %s; Result line: %s"
                            % (self.report.getLabel(), sys.exc_info()[0],
                               sys.exc_info()[1], line))
                        badRowFlag = True
                        prevLine = line
                        continue

                if count == 0:
                    #Header row: replace raw field names with display labels
                    count += 1
                    for idx, csvValue in enumerate(csvLine):
                        csvLine[idx] = self.findDisplayLabel(csvValue)
                elif csvLine[0] not in idQryList:
                    #ignore rows that did not pass the __checkResults filter
                    continue

                writer.writeNext(csvLine)

            #Now send off the CSV
            self.out = self.response.getOutputStream("text/csv")
            self.out.print(sw.toString())
            self.out.close()
 def test_java_strings_are_not_list_like(self):
     # java.lang.String is iterable in Jython but must not count as a list.
     result = is_list_like(String())
     assert_equal(result, False)
    def testIndirectEncryptionVariables(self):
        # End-to-end check of the 'encrypt' tool when some passwords live in
        # a separate variable file: after the run, variable-file entries must
        # carry the '{AES}' prefix and decrypt back to the known plaintext,
        # while the corresponding model values stay unencrypted.
        copy2(self._src_model_file_w_variables, self._target_model_test3)
        copy2(self._src_variable_file, self._target_variables_test3)

        args = list()
        args.append(
            'encrypt')  # dummy arg for args[0] to get arg padding right
        args.append(CommandLineArgUtil.ORACLE_HOME_SWITCH)
        args.append(self._oracle_home)
        args.append(CommandLineArgUtil.MODEL_FILE_SWITCH)
        args.append(self._target_model_test3)
        args.append(CommandLineArgUtil.VARIABLE_FILE_SWITCH)
        args.append(self._target_variables_test3)
        args.append(CommandLineArgUtil.PASSPHRASE_SWITCH)
        args.append(self._passphrase)
        exit_code = encrypt._process_request(args)
        self.assertEquals(exit_code, 0)

        model = FileToPython(self._target_model_test3).parse()
        variables = variables_helper.load_variables(
            self._target_variables_test3)
        passphrase_array = String(self._passphrase).toCharArray()

        # Admin password: unencrypted in the model, encrypted in variables.
        admin_pass = model['domainInfo']['AdminPassword']
        self.assertNotEquals(admin_pass.startswith('{AES}'), True)
        admin_pass = variables['admin.password']
        self.assertEquals(admin_pass.startswith('{AES}'), True)
        _decrypted_admin_pass = EncryptionUtils.decryptString(
            admin_pass, passphrase_array)
        self.assertEquals(str(String(_decrypted_admin_pass)),
                          self._unencrypted_password)

        # Node manager password: same split as the admin password.
        nm_pass = model['topology']['SecurityConfiguration'][
            'NodeManagerPasswordEncrypted']
        self.assertNotEquals(nm_pass.startswith('{AES}'), True)
        nm_pass = variables['nm.password']
        self.assertEquals(nm_pass.startswith('{AES}'), True)
        _decrypted_nm_pass = EncryptionUtils.decryptString(
            nm_pass, passphrase_array)
        self.assertEquals(str(String(_decrypted_nm_pass)),
                          self._unencrypted_password)

        # Generic1 datasource password is stored directly in the model, so it
        # must be encrypted in place.
        ds1_pass = model['resources']['JDBCSystemResource']['Generic1'][
            'JdbcResource']['JDBCDriverParams']['PasswordEncrypted']
        self.assertEquals(ds1_pass.startswith('{AES}'), True)
        _decrypted_ds1_pass = EncryptionUtils.decryptString(
            ds1_pass, passphrase_array)
        self.assertEquals(str(String(_decrypted_ds1_pass)),
                          self._unencrypted_password)

        # ONS wallet password: unencrypted in model, encrypted in variables.
        ons_pass = \
            model['resources']['JDBCSystemResource']['Generic1']['JdbcResource']['JDBCOracleParams']['OnsWalletPasswordEncrypted']
        self.assertNotEquals(ons_pass.startswith('{AES}'), True)
        ons_pass = variables['slc05til.ons.pass']
        self.assertEquals(ons_pass.startswith('{AES}'), True)
        _decrypted_ons_pass = EncryptionUtils.decryptString(
            ons_pass, passphrase_array)
        self.assertEquals(str(String(_decrypted_ons_pass)),
                          self._unencrypted_password)

        # Generic2 datasource password is also stored directly in the model.
        ds2_pass = model['resources']['JDBCSystemResource']['Generic2'][
            'JdbcResource']['JDBCDriverParams']['PasswordEncrypted']
        self.assertEquals(ds2_pass.startswith('{AES}'), True)
        _decrypted_ds2_pass = EncryptionUtils.decryptString(
            ds2_pass, passphrase_array)
        self.assertEquals(str(String(_decrypted_ds2_pass)),
                          self._unencrypted_password)
        return
Пример #20
0
from java.lang import Long, Integer, Short, Character, Byte, Boolean, Double, Float, String
from java.math import BigInteger

# Fixture values spanning Python natives, project classes, and boxed Java
# primitives.  (o, c, foo, subfoo, subfoo2, n and the Foo/SubFoo/C/N names
# are defined earlier in this file.)
values = [
    0, 0L, 0.0, 'c', "string", (), o, c, foo, subfoo, subfoo2, n, Object,
    Class, Foo, SubFoo, C, SubFoo2, N, Integer,
    Long(0),
    Integer(0),
    Short(0),
    Character('c'),
    Byte(0),
    Boolean(0),
    Double(0),
    Float(0),
    String("js"),
    BigInteger("0")
]


def pp(v):
    """Pretty-print a test value: use the mapping in d when present, else a
    short class or instance description."""
    mapped = d.get(v, None)
    if mapped is not None:
        return mapped
    if hasattr(v, '__name__'):
        return v.__name__
    if isinstance(v, (String, Number, Character, Boolean)):
        qualified = v.__class__.__name__
        short = qualified[qualified.rfind('.') + 1:]
        return "%s[%s]" % (short, v)
    return repr(v)
Пример #21
0
 def _getContentBytes(self, contentString):
     """Return *contentString* compressed (with checksum) as a byte array.

     Raises ValueError when the content is empty or None.
     """
     if not contentString:
         raise ValueError("content is empty")
     # Renamed local to avoid shadowing the builtin 'bytes'.
     content_bytes = String(contentString).getBytes()
     zipper = ChecksumZipper()
     zippedBytes = zipper.zip(content_bytes)
     return zippedBytes
Пример #22
0
 def write(self, *args):
     # Delegate to the wrapped output, coercing the text to java.lang.String.
     text = args[0]
     self._output.write(String(text))
Пример #23
0
 def onCall(self):
     # Force an exception from Java.
     # charAt(10) on a one-character string is out of bounds, so the JVM
     # raises StringIndexOutOfBoundsException.
     String("a").charAt(10)
Пример #24
0
 def _char_slice_to_unicode(self, characters, start, length):
     """Convert a char[] slice to a PyUnicode instance"""
     segment = characters[start:start + length]
     return Py.newUnicode(String(segment))
Пример #25
0
    def Collection_Btree(self, key):
        """
        Exercise the b+tree collection API end to end: insert, piped bulk
        insert, ranged get, bulk get, create, sort-merge get, update,
        attribute change and ranged delete.
        """
        name = 'Collection_Btree'
        keyList = []
        for i in range(4):                    # create 4 keys
            keyList.append(key + str(i))

        bkeyBASE = "bkey_byteArry"

        eflag = String("EFLAG").getBytes()
        filter = ElementFlagFilter(ElementFlagFilter.CompOperands.Equal, String("EFLAG").getBytes())
        attr = CollectionAttributes()
        attr.setExpireTime(ExpireTime)

        # BopInsert + byte_array bkey
        for j in range(4):                        # 4 keys
            for i in range(50):                   # Insert 50 bkey
                bk = bkeyBASE + str(j) + str(i)   # Uniq bkey
                bkey = String(String.valueOf(bk)).getBytes()                        ####____####
                future = self.client.asyncBopInsert(keyList[j], bkey, eflag, random.choice(workloads), attr)
                result = self.arcusGet(future, name=name)
                #print str(result)

        # Bop Bulk Insert (Piped Insert)
        elements = []
        for i in range(50):
            bk = bkeyBASE + str(0) + str(i) + "bulk"
            elements.append(Element(String(str(bk)).getBytes(), workloads[0], eflag)) ####____####
        future = self.client.asyncBopPipedInsertBulk(keyList[0], elements, CollectionAttributes())
        result = self.arcusGet(future, name=name)
        #print str(result)

        # BopGet Range + filter
        for j in range(4):
            bk = bkeyBASE + str(j) + str(0)
            bk_to = bkeyBASE + str(j) + str(50)
            bkey = String(String.valueOf(bk)).getBytes()
            bkey_to = String(String.valueOf(bk_to)).getBytes()           ####____####
            future = self.client.asyncBopGet(keyList[j], bkey, bkey_to, filter, 0, random.randint(20, 50), False, False)
            result = self.arcusGet(future, name=name)
            #print str(result)

        # BopGetBulk  // 20120319 Ad
        bk = bkeyBASE + str(0) + str(0)
        bk_to = bkeyBASE + str(4) + str(50)
        bkey = String(String.valueOf(bk)).getBytes()
        bkey_to = String(String.valueOf(bk_to)).getBytes()           ####____####
        future = self.client.asyncBopGetBulk(keyList, bkey, bkey_to, filter, 0, random.randint(20, 50))
        result = self.arcusGet(future, name=name)
        #for entry in result.entrySet():
        #    print str(entry.getKey())

        #    if entry.getValue().getElements() is not None:
        #        print "["
        #        for element in entry.getValue().getElements().entrySet():
        #            print "bkey=%s, value=%s" % (str(element.getKey()), str(element.getValue().getValue()))
        #        print "]"
        #    else:
        #        print "[elements=%s, response=%s]" % (entry.getValue().getElements(), entry.getValue().getCollectionResponse().getMessage())
        #print ""
        #print str(result)

        # BopEmpty Create
        future = self.client.asyncBopCreate(key, ElementValueType.STRING, CollectionAttributes())
        result = self.arcusGet(future, name=name)
        #print str(result)

        # BopSMGet
        bk = bkeyBASE + str(0) + str(0)
        bk_to = bkeyBASE + str(4) + str(50)
        bkey = String(String.valueOf(bk)).getBytes()             ####____####
        bkey_to = String(String.valueOf(bk_to)).getBytes()       ####____####
        future = self.client.asyncBopSortMergeGet(keyList, bkey, bkey_to, filter, 0, random.randint(20, 50))
        result = self.arcusGet(future, name=name)
        #print str(result)

        # BopUpdate  (eflag bitOP + value)
        key = keyList[0]
        eflagOffset = 0
        value = "ThisIsChangeValue"
        bitop = ElementFlagUpdate(eflagOffset, ElementFlagFilter.BitWiseOperands.AND, String("aflag").getBytes())
        for i in range(2):                      # update 2 elements
            bk = bkeyBASE + str(0) + str(i)
            bkey = String(String.valueOf(bk)).getBytes()       ####____####
            future = self.client.asyncBopUpdate(key, bkey, bitop, value)
            result = self.arcusGet(future, name=name)
            #print str(result)

        # SetAttr  (change Expire Time)
        attr.setExpireTime(100)
        future = self.client.asyncSetAttr(key, attr)
        result = self.arcusGet(future, name=name)
        #print str(result)

        # BopDelete          (eflag filter delete)
        for j in range(4):
            bk = bkeyBASE + str(j) + str(0)
            bk_to = bkeyBASE + str(j) + str(10)
            bkey = String(String.valueOf(bk)).getBytes() ####____####
            bkey_to = String(String.valueOf(bk_to)).getBytes()  ####____####
            future = self.client.asyncBopDelete(keyList[j], bkey, bkey_to, filter, 0, False)
            result = self.arcusGet(future, name=name)
Пример #26
0
def __remesh(options):
    """Run the full surface-remeshing pipeline driven by *options*.

    ``options`` is an attribute bag (it is also splatted into
    ``create_mesh(**options)``, so presumably a mapping-like namespace --
    TODO confirm) carrying the target element ``size``, ``coplanarity``
    threshold and the many optional switches read below.

    The pipeline alternates skeleton remeshing, edge swapping, QEM/length
    decimation, remeshing, valence improvement and smoothing, writing a
    debug dump via ``writeVTK(liaison)`` between the numbered stages.
    Results are written to ``options.out_dir`` when set.

    Fix vs. previous revision: ``options.boundary_angle == None`` replaced
    by the idiomatic identity test ``is None``.
    """
    afront_stderr = getattr(options, 'afront_stderr', None)
    mesh = getattr(options, 'mesh', None)
    liaison = getattr(options, 'liaison', None)
    # Build a background mesh / liaison pair unless the caller provided one.
    if not liaison:
        if not mesh:
            mesh = create_mesh(**options)
        liaison = MeshLiaison.create(mesh)

    if options.recordFile:
        # Enable the trace recorder so every operation is replayable.
        liaison.getMesh().getTrace().setDisabled(False)
        liaison.getMesh().getTrace().setLogFile(options.recordFile)
        liaison.getMesh().getTrace().createMesh("mesh", liaison.getMesh())
    if options.immutable_border:
        liaison.mesh.tagFreeEdges(AbstractHalfEdge.IMMUTABLE)

    liaison.getMesh().buildRidges(options.coplanarity)
    if options.immutable_border_group:
        liaison.mesh.tagGroupBoundaries(AbstractHalfEdge.IMMUTABLE)
    else:
        if options.preserveGroups:
            liaison.getMesh().buildGroupBoundaries()

    immutable_groups = []
    if options.immutable_groups_file:
        # Groups listed in the file are frozen for the whole pipeline.
        immutable_groups = read_groups(options.immutable_groups_file)
        liaison.mesh.tagGroups(immutable_groups, AbstractHalfEdge.IMMUTABLE)

    # Optional point metric: from a metric file, from the options object,
    # or absent (uniform target size is used instead).
    if options.point_metric_file:
        point_metric = DistanceMetric(options.size, options.point_metric_file)
    elif getattr(options, 'point_metric', None):
        point_metric = options.point_metric
    else:
        point_metric = None
    # Swap/decimate stages use a laxer coplanarity, never below 0.8.
    safe_coplanarity = str(max(options.coplanarity, 0.8))

    if options.forced_points:
        # Insert caller-mandated vertices and pin them for the whole run.
        if point_metric:
            vi = VertexInsertion(liaison, point_metric)
        else:
            vi = VertexInsertion(liaison, options.size)
        vi.insertNodes(options.forced_points, -1)
        Vertex.setMutable(vi.mutableInserted, False)

    #0 -- initial state
    writeVTK(liaison)
    if options.boundary_angle is None:
        options.boundary_angle = 1.66
    if point_metric:
        point_metric.scaling = 1
        if options.forced_bounds:
            BeamInsertion(liaison.mesh,
                          point_metric).insert(options.forced_bounds[0],
                                               options.forced_bounds[1])
        RemeshSkeleton(liaison, options.boundary_angle, options.size / 100.0,
                       point_metric).compute()
    else:
        RemeshSkeleton(liaison, options.boundary_angle, options.size / 100.0,
                       options.size).compute()
        if options.forced_bounds:
            BeamInsertion(liaison.mesh,
                          options.size).insert(options.forced_bounds[0],
                                               options.forced_bounds[1])

    #1 -- after skeleton remeshing
    writeVTK(liaison)
    opts = HashMap()
    opts.put("coplanarity", safe_coplanarity)
    # Swapping here will help QEMDecimateHalfEdge to decimate more and will
    # reduce the risk to have edge not processed by LengthDecimateHalfEdge
    algo = SwapEdge(liaison, opts)
    algo.maxSwapVolume = (options.size / 4.0)**3
    algo.compute()

    #2 -- after pre-decimation swap
    writeVTK(liaison)

    if options.recordFile:
        # Sanity hooks replayed by the trace: mesh must stay valid after
        # every recorded operation.
        cmds = [
            String("assert self.m.checkNoDegeneratedTriangles()"),
            String("assert self.m.checkNoInvertedTriangles()"),
            String("assert self.m.checkVertexLinks()"),
            String("assert self.m.isValid()")
        ]
        liaison.getMesh().getTrace().setHooks(cmds)

    opts.clear()
    opts.put("coplanarity", str(options.coplanarity))
    opts.put("size", str(options.size * 0.3))
    opts.put("maxlength", str(options.size * sqrt(2)))
    #workaround for a QEMDecimateHalfEdge bug
    opts.put("freezeNonManifold", "true")
    algo = QEMDecimateHalfEdge(liaison, opts)
    if point_metric:
        point_metric.scaling = sqrt(2)
        algo.analyticMetric = point_metric
    algo.compute()

    #3 -- after QEM decimation
    # afront call
    writeVTK(liaison)
    afront_nodes_reader = None
    afront_frozen = None
    if options.afront_path:
        # Run the external afront mesher in a scratch directory, insert its
        # nodes, then freeze them until valence improvement below.
        tmp_dir = tempfile.mkdtemp()
        afront_nodes_reader = afront(options.afront_path,
                                     tmp_dir,
                                     liaison.mesh,
                                     options.size,
                                     point_metric,
                                     immutable_groups,
                                     afront_stderr=afront_stderr)
        afront_frozen = afront_insert(liaison, afront_nodes_reader,
                                      options.size, point_metric)
        Vertex.setMutable(afront_frozen, False)
        shutil.rmtree(tmp_dir, ignore_errors=True)

    #4 -- after afront insertion
    writeVTK(liaison)
    if options.afront_path:
        opts.clear()
        opts.put("expectInsert", "false")
        opts.put("coplanarity", safe_coplanarity)
        SwapEdge(liaison, opts).compute()

    #5 -- after post-afront swap
    writeVTK(liaison)
    opts.clear()
    opts.put("size", str(options.size))
    opts.put("freeEdgesOnly", "true")
    opts.put("coplanarity", "-2")
    algo = LengthDecimateHalfEdge(liaison, opts)
    if point_metric:
        algo.analyticMetric = point_metric
    algo.compute()

    #6 -- after free-edge length decimation
    writeVTK(liaison)
    opts.clear()
    opts.put("size", str(options.size))
    opts.put("coplanarity", str(options.coplanarity))
    opts.put("minCosAfterSwap", "0.3")
    opts.put("nearLengthRatio", "0.6")
    algo = Remesh(liaison, opts)
    if point_metric:
        point_metric.scaling = 1
        algo.analyticMetric = point_metric
    algo.compute()

    #7 -- after main remesh
    writeVTK(liaison)

    opts.clear()
    opts.put("coplanarity", safe_coplanarity)
    opts.put("expectInsert", "false" if options.afront_path else "true")
    SwapEdge(liaison, opts).compute()

    #8 -- after swap
    writeVTK(liaison)

    opts.clear()
    opts.put("coplanarity", str(options.coplanarity))
    opts.put("iterations", "2")
    opts.put("size", str(options.size))
    algo = SmoothNodes3DBg(liaison, opts)
    algo.compute()

    #9 -- after smoothing
    writeVTK(liaison)

    opts.clear()
    opts.put("coplanarity", str(options.coplanarity))
    opts.put("expectInsert", "false" if options.afront_path else "true")
    opts.put("minCosAfterSwap", "0.3")
    algo = SwapEdge(liaison, opts)
    algo.angleQualityRatio = 150
    algo.compute()

    #10 -- after quality-constrained swap
    writeVTK(liaison)
    if not options.afront_path:
        opts.clear()
        opts.put("size", str(options.size))
        algo = Remesh(liaison, opts)
        # NOTE(review): assigned unconditionally here (may be None/null),
        # unlike the guarded assignments above -- presumably accepted by
        # the Java setter; confirm.
        algo.analyticMetric = point_metric
        algo.compute()

    #11 -- after secondary remesh
    writeVTK(liaison)

    opts.clear()
    opts.put("coplanarity", str(options.coplanarity))
    opts.put("size", str(options.size * 0.3))
    opts.put("maxlength", str(options.size * sqrt(2)))
    #workaround for a QEMDecimateHalfEdge bug
    opts.put("freezeNonManifold", "true")
    algo = QEMDecimateHalfEdge(liaison, opts)
    if point_metric:
        point_metric.scaling = sqrt(2)
        algo.analyticMetric = point_metric
    algo.compute()

    #12 -- after second QEM decimation
    writeVTK(liaison)

    opts.clear()
    opts.put("coplanarity", str(options.coplanarity))
    opts.put("expectInsert", "false" if options.afront_path else "true")
    opts.put("minCosAfterSwap", "0.3")
    algo = SwapEdge(liaison, opts)
    algo.angleQualityRatio = 150
    algo.compute()

    #13 -- after final swap
    writeVTK(liaison)

    if afront_frozen:
        # Unfreeze afront nodes so valence improvement may move them.
        Vertex.setMutable(afront_frozen, True)

    opts.clear()
    opts.put("checkNormals", "false")
    ImproveVertexValence(liaison, opts).compute()

    #14 -- after valence improvement
    writeVTK(liaison)

    opts.clear()
    opts.put("coplanarity", safe_coplanarity)
    opts.put("iterations", str(8))
    algo = SmoothNodes3DBg(liaison, opts)
    algo.compute()

    #15 -- after final smoothing
    writeVTK(liaison)

    #MeshWriter.writeObject3D(liaison.mesh, outDir, ""
    # Rebuild beams: extract feature polylines and remesh each one to the
    # target size, keeping immutable groups verbatim.
    polylines = PolylineFactory(liaison.mesh, 135.0, options.size * 0.2)
    liaison.mesh.resetBeams()
    for entry in polylines.entrySet():
        groupId = entry.key
        for polyline in entry.value:
            listM = ArrayList()
            for v in polyline:
                listM.add(EuclidianMetric3D(options.size))
            #print "Remesh polyline of group "+str(groupId)+"/"+str(polylines.size())+" "+str(polyline.size())+" vertices"
            if liaison.mesh.getGroupName(groupId) in immutable_groups:
                result = polyline
            elif point_metric:
                result = RemeshPolyline(liaison.mesh, polyline,
                                        point_metric).compute()
            else:
                result = RemeshPolyline(liaison.mesh, polyline,
                                        listM).compute()
            for i in xrange(result.size() - 1):
                liaison.mesh.addBeam(result.get(i), result.get(i + 1), groupId)
            #print "  New polyline: "+str(result.size())+" vertices"

    if options.recordFile:
        liaison.getMesh().getTrace().finish()

    if options.post_script:
        execfile(options.post_script)
    if options.out_dir:
        MeshWriter.writeObject3D(liaison.mesh, options.out_dir, "")
Пример #27
0
 def listdir(path):
     """Patched os.listdir: when *path* is unicode, round-trip each entry
     through java.lang.String so the returned names are unicode too."""
     entries = os._orig_listdir(path)
     if not isinstance(path, unicode):
         return entries
     return [unicode(String(entry).toString()) for entry in entries]
Пример #28
0
# Jython/Java integration checks: java.math.BigInteger constructed from
# Python values (string + radix, byte list, sign + magnitude byte list).
assert BigInteger('1234', 10).intValue() == 1234, 'BigInteger(string)'
assert BigInteger([0x11, 0x11,
                   0x11]).intValue() == 0x111111, 'BigInteger(byte[])'
assert BigInteger(
    -1, [0x11, 0x11, 0x11]).intValue() == -0x111111, 'BigInteger(int, byte[])'

print_test('call static methods')
# Overload dispatch: char[], String, int, long and (char[], offset, count)
# arguments must all resolve to a String.valueOf overload yielding "123".
s1 = String.valueOf(['1', '2', '3'])
s2 = String.valueOf('123')
s3 = String.valueOf(123)
s4 = String.valueOf(123l)
s5 = String.valueOf(['0', '1', '2', '3', 'a', 'b'], 1, 3)
assert s1 == s2 == s3 == s4 == s5, 'String.valueOf method with different arguments'

print_test('call instance methods')
s = String('hello')
# String.regionMatches has overloads with and without a leading ignoreCase
# boolean; Python ints 0/1 must coerce to Java false/true.
assert s.regionMatches(1, 1, 'ell', 0, 3), 'method call with boolean true'
assert s.regionMatches(0, 1, 'ell', 0, 3), 'method call with boolean false'
assert s.regionMatches(1, 'ell', 0, 3), 'method call no boolean'

assert s.regionMatches(1, 1, 'eLl', 0, 3), 'method call ignore case'
assert not s.regionMatches(1, 'eLl', 0, 3), 'should ignore case'

from java.awt import Dimension

print_test('get/set fields')
# Public Java fields are exposed as plain Python attributes.
d = Dimension(3, 9)
assert d.width == 3 and d.height == 9, 'getting fields'
d.width = 42
assert d.width == 42 and d.height == 9, 'setting fields'
Пример #29
0
def getDataSets(fileName, attributes, queryParameters, ptDataDescription):
    """Query a netCDF file and pack the matching records into a
    PointDataContainer.

    fileName        -- path of the netCDF file to read
    attributes      -- java.util.List of variable names to extract
    queryParameters -- java.util.List of filter expressions (parsed by
                       parseQueryParams; format defined there)
    ptDataDescription -- description object forwarded to
                       PointDataContainer.build

    Returns a populated PointDataContainer, an empty one when no record
    matches, or None when a value of unsupported type is encountered.
    """

    #Converts the attributes from a java.util.List to a python list
    attributes = JUtil.javaStringListToPylist(attributes)

    #Converts the query parameters from a java.util.List to a python list
    queryParameters = JUtil.javaStringListToPylist(queryParameters)

    dataSets = {}
    # NOTE(review): 'file' shadows the builtin of the same name here.
    file = NetCDFFile(str(fileName))

    #Parses the query parameters into a more easily managed form
    queryParameters = parseQueryParams(file, queryParameters)

    #Initializes the lists to be constructed
    for attr in attributes:
        dataSets[attr] = []

    #Gets the data from the netCDF.  The data retrieved is filtered according to
    #the query parameters provided.  This is essentially an inefficient query procedure
    # Record count is taken from the 'observationTime' variable's first
    # dimension -- presumably present in every file handled here; confirm.
    for recordIndex in range(0, file.getVariable('observationTime').shape[0]):
        if _checkConditions(file, recordIndex, queryParameters):
            for attr in attributes:
                dataItem = file.getData(attr, recordIndex)
                dataSets[attr].append(dataItem)

    #If no data is retrieved, simply return an empty container
    if len(dataSets[dataSets.keys()[0]]) == 0:
        return PointDataContainer()

    recs = ArrayList()

    #Cycle through the requested attributes and format the data into a PointDataContainer
    for attr in attributes:

        #Get the type and size of the retrieved data
        # Type is inferred from the first retrieved item; all items of an
        # attribute are assumed homogeneous.
        dataType = type(dataSets[attr][0])

        sz = len(dataSets[attr])
        sizes = numpy.zeros(1, numpy.int32)
        sizes[0] = sz

        #If the dataset is an array, construct the IDataRecord accordingly
        if dataType == numpy.ndarray:
            # 2-D case: sz records of arrLen elements each, flattened to a
            # single row for the Java record with sizes = [sz, arrLen].
            sizes = numpy.zeros(2, numpy.int32)
            arrLen = len(dataSets[attr][0])
            sizes[0] = sz
            sizes[1] = arrLen
            arrType = type(dataSets[attr][0][0])

            if arrType == numpy.float32 or arrType == numpy.float64:
                # float64 is narrowed to float32 for the FloatDataRecord.
                arr = numpy.zeros((sz, arrLen), numpy.float32)
                for i in range(0, sz):
                    arr[0:][i] = dataSets[attr][i]
                arr = numpy.resize(arr, (1, sz * arrLen))
                rec = FloatDataRecord()
                rec.setFloatData(arr)
            elif arrType == numpy.int16 or arrType == numpy.int8 or arrType == numpy.int32:
                # All integer widths are widened to int32.
                arr = numpy.zeros((sz, arrLen), numpy.int32)
                for i in range(0, sz):
                    arr[0:][i] = dataSets[attr][i]
                arr = numpy.resize(arr, (1, sz * arrLen))
                rec = IntegerDataRecord()
                rec.setIntData(arr)
            elif arrType == numpy.string_:
                # Char arrays become a Java String[] via a jep array.
                jstr = jep.jarray(sz, String)
                for i in range(sz):
                    dataString = file.charArrayToString(dataSets[attr][i])
                    jstr[i] = String(dataString)
                rec = StringDataRecord(attr, "", jstr)
            else:
                file.close()
                LogStream.logProblem("Unsupported data type detected: " +
                                     str(arrType))
                return None

            # StringDataRecord was fully initialized by its constructor;
            # the other record types still need name/dimension/sizes/group.
            if arrType != numpy.string_:
                rec.setName(attr)
                rec.setDimension(2)
                rec.setIntSizes(sizes)
                rec.setGroup("")

        #The dataset is not an array type so examine the data and create the appropriate
        #type of IDataRecord
        else:
            #Creates a FloatDataRecord
            if dataType == numpy.float32 or dataType == numpy.float64:
                arr = numpy.zeros(sz, numpy.float32)
                for i in range(0, sz):
                    arr[i] = dataSets[attr][i]
                rec = FloatDataRecord()
                rec.setFloatData(arr)

            #Creates an IntDataRecord
            elif dataType == numpy.int16 or dataType == numpy.int8 or dataType == numpy.int32:
                arr = numpy.zeros(sz, numpy.int32)
                for i in range(0, sz):
                    arr[i] = dataSets[attr][i]
                rec = IntegerDataRecord()
                rec.setIntData(arr)

            #Creates a StringDataRecord
            elif dataType == numpy.string_:
                jstr = jep.jarray(sz, String)
                for i in range(sz):
                    jstr[i] = String(dataSets[attr][i])
                rec = StringDataRecord(attr, "", jstr)
            else:
                file.close()
                LogStream.logProblem("Unsupported data type detected: " +
                                     str(dataType))
                return None

            # Sets the required data on the IDataRecord.
            # This is already done for for the StringDataRecord
            if dataType != numpy.string_:
                rec.setName(attr)
                rec.setDimension(1)
                rec.setIntSizes(sizes)
                rec.setGroup("")
        recs.add(rec)

    #Close the file
    file.close()
    #Populate the container
    return PointDataContainer.build(ptDataDescription, recs)
Пример #30
0
 def __copyString(self, s, out):
     """Write the string *s* to the *out* stream as UTF-8 bytes."""
     source = IOUtils.toInputStream(String(s), "UTF-8")
     IOUtils.copy(source, out)