def test_datetime(self):
    """Py.newDatetime must map java.sql.Timestamp to datetime.datetime."""
    # Each case: (java.sql.Timestamp constructor args, expected datetime).
    # Timestamp years are offsets from 1900 and months are 0-based;
    # its last field is nanoseconds, datetime's is microseconds.
    cases = (
        ((108, 0, 1, 0, 0, 0, 0), datetime.datetime(2008, 1, 1)),
        ((108, 4, 29, 16, 50, 0, 0), datetime.datetime(2008, 5, 29, 16, 50, 0)),
        ((108, 4, 29, 16, 50, 1, 1000), datetime.datetime(2008, 5, 29, 16, 50, 1, 1)),
    )
    for ts_args, expected in cases:
        self.assertEquals(expected, Py.newDatetime(Timestamp(*ts_args)))
def __makeModule(name, code, path):
    """Register a new module under `name`, run `code` in its namespace,
    record `path` as its __file__, and return the module object."""
    mod = _imp.addModule(name)
    ns = mod.__dict__
    # The frame uses the module dict for both globals and locals, with the
    # interpreter's builtins, so the code executes as a module body would.
    frame = _Frame(code, ns, ns, _Py.getSystemState().builtins)
    mod.__file__ = path
    code.call(frame)  # execute the module body
    return mod
def createMindMap( self ):
    """Load the FreeMind mind-map view inside Leo.

    Temporarily swaps out the JVM classpath, the current thread's context
    class loader, and the Jython system-state class loader for the
    leoFreeMindView loader, then restores all three in the finally clause.
    The order of the three set* calls below is significant.
    """
    import org.python.core.Py as Py
    system_state = Py.getSystemState()
    # Remember the original Jython class loader so it can be restored.
    sscl = system_state.getClassLoader()
    try:
        try:
            # Capture current thread context loader and classpath for restoration.
            # NOTE(review): if either of these two calls raised, ccl/cpath would be
            # unbound in the finally clause below — presumed not to happen in practice.
            ccl = java.lang.Thread.currentThread().getContextClassLoader()
            cpath = java.lang.System.getProperty( "java.class.path" )
            import leoFreeMindView
            #Each of these 3 calls play a pivotal role in loading the system
            java.lang.System.setProperty( "java.class.path", "" )
            java.lang.Thread.currentThread().setContextClassLoader( leoFreeMindView.mmcl )
            system_state.setClassLoader( leoFreeMindView.mmcl )
            self.mm = leoFreeMindView.mindmap( self.c )
            self.mindmap = self.mm.mindmapview
            # Keep FreeMind's menus in sync via a menu listener.
            ml = MListener( self.mm._free_leo_mind.updateMenus )
            self.mm._free_leo_mind.getMainMenu().addMenuListener( ml )
        except java.lang.Exception, x:
            # Best-effort: report the failure to the user, don't propagate.
            x.printStackTrace()
            swing.JOptionPane.showMessageDialog( None, "Cant Load MindMap View." )
    finally:
        # Restore classpath and both class loaders no matter what happened.
        java.lang.System.setProperty( "java.class.path", cpath )
        java.lang.Thread.currentThread().setContextClassLoader( ccl )
        system_state.setClassLoader( sscl )
def getPyObject(self, set, col, datatype):
    """Map a JDBC column value (from result set `set`, column `col`) to a
    Python object based on the java.sql.Types `datatype`; anything not
    handled here is delegated to FilterDataHandler."""
    if datatype in (Types.VARCHAR, Types.CHAR):
        return Py.newUnicode(set.getString(col))
    if datatype == Types.TIMESTAMP:
        # java.sql.Timestamp -> datetime.datetime via a GregorianCalendar
        # (Calendar months are 0-based; milliseconds become microseconds).
        c = GregorianCalendar()
        c.time = set.getTimestamp(col)
        return datetime.datetime(c.get(Calendar.YEAR),
                                 c.get(Calendar.MONTH) + 1,
                                 c.get(Calendar.DAY_OF_MONTH),
                                 c.get(Calendar.HOUR_OF_DAY),
                                 c.get(Calendar.MINUTE),
                                 c.get(Calendar.SECOND),
                                 c.get(Calendar.MILLISECOND) * 1000)
    if datatype == Types.TIME:
        # java.sql.Time -> datetime.time
        c = GregorianCalendar()
        c.time = set.getTime(col)
        return datetime.time(c.get(Calendar.HOUR_OF_DAY),
                             c.get(Calendar.MINUTE),
                             c.get(Calendar.SECOND),
                             c.get(Calendar.MILLISECOND) * 1000)
    if datatype == Types.DATE:
        # java.sql.Date -> datetime.date
        c = GregorianCalendar()
        c.time = set.getDate(col)
        return datetime.date(c.get(Calendar.YEAR),
                             c.get(Calendar.MONTH) + 1,
                             c.get(Calendar.DAY_OF_MONTH))
    return FilterDataHandler.getPyObject(self, set, col, datatype)
def test_faulty_callback(self):
    """A faulty callback must not stop the other registered callbacks:
    exactly one assert_callback invocation is expected."""
    tools.register(self.faulty_callback)
    tools.register(self.assert_callback)
    tools.register(self.faulty_callback2)
    self.count = 0
    # Suppress the warning otherwise produced.  This setup is hoisted out
    # of the try block: if it raised inside it, `level` (and `Py`) would be
    # unbound in the finally clause and the resulting NameError would mask
    # the real failure.
    from org.python.core import Py
    from java.util.logging import Level
    level = Py.setLoggingLevel(Level.SEVERE)
    try:
        eval("42+1")
    finally:
        # Always unregister and restore the logging level.
        self.assertTrue(tools.unregister(self.faulty_callback))
        self.assertTrue(tools.unregister(self.faulty_callback2))
        self.assertTrue(tools.unregister(self.assert_callback))
        Py.setLoggingLevel(level)
    self.assertEqual(self.count, 1)
def _thread_state(self):
    """If we're in the same thread, we need to grab the state from the
    master Py object. Otherwise we rip it from the thread itself.

    Recomputed on every call so the reference is always current.
    """
    same_thread = Thread.currentThread() is self._target_thread
    return Py.getThreadState() if same_thread else getThreadState(self._target_thread)
def finalize_options(self):
    """Finalize the build command's options (distutils hook).

    Resolves plat_name, derives the build directory layout
    (build_purelib/build_platlib/build_lib/build_temp/build_scripts),
    and picks the executable, falling back to Jython's default when
    sys.executable is unset.

    Raises DistutilsOptionError if --plat-name is given on a non-Windows
    platform.
    """
    if self.plat_name is None:
        self.plat_name = get_platform()
    else:
        # plat-name only supported for windows (other platforms are
        # supported via ./configure flags, if at all). Avoid misleading
        # other platforms.
        if os.name != 'nt':
            raise DistutilsOptionError(
                "--plat-name only supported on Windows (try "
                "using './configure --help' on your platform)")
    plat_specifier = ".%s-%s" % (self.plat_name, sys.version[0:3])
    # Make it so Python 2.x and Python 2.x with --with-pydebug don't
    # share the same build directories. Doing so confuses the build
    # process for C modules
    if hasattr(sys, 'gettotalrefcount'):
        plat_specifier += '-pydebug'
    # 'build_purelib' and 'build_platlib' just default to 'lib' and
    # 'lib.<plat>' under the base build directory. We only use one of
    # them for a given distribution, though --
    if self.build_purelib is None:
        self.build_purelib = os.path.join(self.build_base, 'lib')
    if self.build_platlib is None:
        self.build_platlib = os.path.join(self.build_base,
                                          'lib' + plat_specifier)
    # 'build_lib' is the actual directory that we will use for this
    # particular module distribution -- if user didn't supply it, pick
    # one of 'build_purelib' or 'build_platlib'.
    if self.build_lib is None:
        if self.distribution.ext_modules:
            self.build_lib = self.build_platlib
        else:
            self.build_lib = self.build_purelib
    # 'build_temp' -- temporary directory for compiler turds,
    # "build/temp.<plat>"
    if self.build_temp is None:
        self.build_temp = os.path.join(self.build_base,
                                       'temp' + plat_specifier)
    if self.build_scripts is None:
        self.build_scripts = os.path.join(self.build_base,
                                          'scripts-' + sys.version[0:3])
    if self.executable is None:
        # Idiom fix: "x is not None" rather than "not x is None".
        if sys.executable is not None:
            self.executable = os.path.normpath(sys.executable)
        else:
            # Running embedded (no launcher): ask Jython for its default.
            from org.python.core import Py
            self.executable = Py.getDefaultExecutableName()
def compile_command(source, filename="<input>", symbol="single"): r"""Compile a command and determine whether it is incomplete. Arguments: source -- the source string; may contain \n characters filename -- optional filename from which source was read; default "<input>" symbol -- optional grammar start symbol; "single" (default) or "eval" Return value / exceptions raised: - Return a code object if the command is complete and valid - Return None if the command is incomplete - Raise SyntaxError, ValueError or OverflowError if the command is a syntax error (OverflowError and ValueError can be produced by malformed literals). """ if symbol not in ['single','eval']: raise ValueError,"symbol arg must be either single or eval" return Py.compile_command_flags(source,filename,symbol,Py.getCompilerFlags(),0)
def replaceAllByArray(query,data):
    """Substitute each '??' placeholder in `query` with the successive
    items of `data` and return the resulting string.

    Raises IndexError if `data` has fewer items than there are
    placeholders (extra items are ignored).
    """
    text = Py.newString(query)
    parts = re.compile("[\?]{2}").split(text)
    pieces = [parts[0]]
    # Interleave: data[i-1] fills the gap left by the i-th '??'.
    for i in range(1, len(parts)):
        pieces.append(data[i-1])
        pieces.append(parts[i])
    return ''.join(pieces)
def makeClass(self): global _builder print "Entering makeClass", self try: import sys print "sys.path", sys.path # If already defined on sys.path (including CLASSPATH), simply return this class # if you need to tune this, derive accordingly from this class or create another CustomMaker cls = Py.findClass(self.myClass) print "Looked up proxy", self.myClass, cls if cls is None: raise TypeError("No proxy class") except: if _builder: print "Calling super...", self.package cls = CustomMaker.makeClass(self) print "Built proxy", self.myClass else: raise TypeError("FIXME better err msg - Cannot construct class without a defined builder") return cls
def makeClass(self):
    """Return the proxy class for self.myClass.

    First tries to find an already-defined class on sys.path (including
    CLASSPATH); if that fails, builds one via CustomMaker, which requires
    a builder to be configured.
    """
    builder = get_builder()
    log.debug("Entering makeClass for %r", self)
    try:
        import sys
        log.debug("Current sys.path: %s", sys.path)
        # If already defined on sys.path (including CLASSPATH), simply return this class
        # if you need to tune this, derive accordingly from this class or create another CustomMaker
        cls = Py.findClass(self.myClass)
        log.debug("Looked up proxy: %r, %r", self.myClass, cls)
        if cls is None:
            # Raised deliberately to divert into the builder path below.
            raise TypeError("No proxy class")
    except:
        # NOTE(review): bare except — also swallows unrelated lookup errors
        # and redirects them to the builder path.
        if builder:
            log.debug("Calling super... for %r", self.package)
            cls = CustomMaker.makeClass(self)
            log.info("Built proxy: %r", self.myClass)
        else:
            raise TypeError("Cannot clamp proxy class {} without a defined builder".format(self.myClass))
    return cls
def makeClass(self):
    """Return the proxy class for self.myClass.

    Looks the class up on sys.path (including CLASSPATH) first; on any
    failure falls back to building it with CustomMaker, which requires a
    configured builder.
    """
    builder = get_builder()
    log.debug("Entering makeClass for %r", self)
    try:
        import sys
        log.debug("Current sys.path: %s", sys.path)
        # If already defined on sys.path (including CLASSPATH), simply return this class
        # if you need to tune this, derive accordingly from this class or create another CustomMaker
        cls = Py.findClass(self.myClass)
        log.debug("Looked up proxy: %r, %r", self.myClass, cls)
        if cls is None:
            # Raised deliberately so the except clause takes the builder path.
            raise TypeError("No proxy class")
    except:
        # NOTE(review): bare except — any lookup failure (not just the
        # TypeError above) ends up on the builder path.
        if builder:
            log.debug("Calling super... for %r", self.package)
            cls = CustomMaker.makeClass(self)
            log.info("Built proxy: %r", self.myClass)
        else:
            raise TypeError(
                "Cannot clamp proxy class {} without a defined builder".
                format(self.myClass))
    return cls
# # Test for bug 1758838 # # execfile(<any file>) should not throw a NullPointerException # # The error only shows up in interactive interpretation (type "single" for the compilation). # But we cannot use InteractiveInterpreter here since it catches all Exceptions, # therefore we do the compilation 'by hand'. # from org.python.core import Py from org.python.core import PySystemState from org.python.util import PythonInterpreter PySystemState.initialize() interp = PythonInterpreter() code = Py.compile_command_flags("execfile('test401/to_be_executed.py')", "<input>", "single", None, 1) interp.exec(code)
def __call__(self, source, filename, symbol):
    """Compile `source` under the stored compiler flags, first resolving
    `symbol` to its CompileMode."""
    return Py.compile_flags(source, filename, CompileMode.getMode(symbol), self._cflags)
def findClass(c):
    """Look up a Java class via the Jython runtime."""
    # The "java class" reason string is reported on lookup failure.
    result = Py.findClassEx(c, "java class")
    return result
def __call__(self, src, filename):
    """Compile `src` in exec (module) mode under the stored flags."""
    return Py.compile_flags(src, filename, CompileMode.exec, self.cflags)
def getSystemState(self):
    """Return the wrapped interpreter's system state, or the global
    Jython system state when no interpreter is attached."""
    from org.python.core import Py
    if self.realInterp:
        return self.realInterp.getSystemState()
    return Py.getSystemState()
proppath = g.os_path_join( g.app.loadDir, "..", "plugins", "spellingdicts", "which.txt") #we start to determine which dictionary to use fis = io.FileInputStream(io.File(proppath)) properties = util.Properties() properties.load(fis) fis.close() fis = None lfile = properties.getProperty("dict") dpath = g.os_path_join(g.app.loadDir, "..", "plugins", "spellingdicts", lfile) dictionary = SpellDictionaryHashMap(io.File(dpath)) import org.python.core.Py as Py #now we descend into the Jython internals... sstate = Py.getSystemState() cloader = sstate.getClassLoader() sstate.setClassLoader( clb2 ) #we do this so the JyLeoSpellChecker class can implement SpellCheckListener, otherwise it fails except java.lang.Exception: load_ok = False if load_ok: #@ <<JyLeoSpellChecker>> #@+node:zorcanda!.20051111215311.1:<<JyLeoSpellChecker>> class JyLeoSpellChecker(SpellCheckListener): def __init__(self, editor): self.c = editor.c self.editor = editor.editor
def _char_slice_to_unicode(self, characters, start, length):
    """Convert the char[] slice characters[start:start+length] into a
    PyUnicode instance."""
    end = start + length
    return Py.newUnicode(String(characters[start:end]))
def getJythonBinDir():
    """Return the directory containing the Jython executable.

    Resolves symlinks via os.path.realpath so the real install location
    is returned.  Falls back to Py.getDefaultBinDir() when sys.executable
    is not set (e.g. when running embedded without a launcher).
    """
    # Idiom fix: "x is not None" rather than "not x is None".
    if sys.executable is not None:
        return os.path.dirname(os.path.realpath(sys.executable))
    return Py.getDefaultBinDir()
def getSystemState(self):
    """Delegate to the wrapped interpreter's system state when one exists;
    otherwise fall back to the global Jython system state."""
    if self.realInterp:
        return self.realInterp.getSystemState()
    return Py.getSystemState()
def replace(query,data):
    """Replace the first '??' placeholder in `query` with `data`."""
    parts = re.compile("[\?]{2}").split(Py.newString(query))
    # Only the text before and after the first placeholder is used.
    return parts[0] + data + parts[1]
clb2.importClass( "SpellDictionaryHashMap", "com.swabunga.spell.engine.SpellDictionaryHashMap" ) #clb2.importClass( "SpellDictionaryCachedDichoDisk", "com.swabunga.spell.engine.SpellDictionaryCachedDichoDisk" ) clb2.importClass( "StringWordTokenizer", "com.swabunga.spell.event.StringWordTokenizer" ) proppath = g.os_path_join( g.app.loadDir, "..", "plugins", "spellingdicts", "which.txt" ) #we start to determine which dictionary to use fis = io.FileInputStream( io.File( proppath ) ) properties = util.Properties() properties.load( fis ) fis.close() fis = None lfile = properties.getProperty( "dict" ) dpath = g.os_path_join( g.app.loadDir, "..", "plugins", "spellingdicts", lfile ) dictionary = SpellDictionaryHashMap( io.File( dpath ) ) import org.python.core.Py as Py #now we descend into the Jython internals... sstate = Py.getSystemState() cloader = sstate.getClassLoader() sstate.setClassLoader( clb2 )#we do this so the JyLeoSpellChecker class can implement SpellCheckListener, otherwise it fails except java.lang.Exception: load_ok = False if load_ok: #@ <<JyLeoSpellChecker>> #@+node:zorcanda!.20051111215311.1:<<JyLeoSpellChecker>> class JyLeoSpellChecker( SpellCheckListener ): def __init__( self, editor ): self.c = editor.c self.editor = editor.editor self.foldprotection = editor.foldprotection
def test_date(self):
    """Py.newDate must map java.sql.Date to datetime.date.

    java.sql.Date years are offsets from 1900 and months are 0-based.
    """
    for date_args, expected in (
            ((108, 4, 29), datetime.date(2008, 5, 29)),
            ((99, 0, 1), datetime.date(1999, 1, 1))):
        self.assertEquals(expected, Py.newDate(Date(*date_args)))
def renderMap(self, connection_parameters=None, sql=None, geom_id_entity=None,
              geom_entity=None, data_entity=None, map_parameters=None):
    """Render a map image via GeoTools from a SQL-defined virtual table.

    connection_parameters -- dict of JDBC/data-store parameters
                             (NOTE(review): the None default would crash at
                             .items() below; presumably callers always pass it)
    sql                   -- query defining the feature virtual table
    geom_id_entity        -- dict with 'ID': primary-key column name
    geom_entity           -- dict with 'ID': geometry column name
    data_entity           -- optional dict driving choropleth class styling
                             ('ID', 'num_classes', 'min', 'max')
    map_parameters        -- optional WMS-like dict (WIDTH, HEIGHT, FORMAT,
                             TRANSPARENT, SRS, BBOX)

    Returns the encoded image as a raw byte string.
    """
    # Fixed mutable-default anti-pattern: a shared {} default dict could be
    # mutated across calls.  None-sentinel keeps the interface compatible.
    if map_parameters is None:
        map_parameters = {}
    # Put connection parameters into a java HashMap.
    params_hashmap = HashMap()
    for param, value in connection_parameters.items():
        if value:
            params_hashmap.put(param, value)
    # Get data store.
    data_store = DataStoreFinder.getDataStore(params_hashmap)
    # Create VirtualTable from sql.
    vtable = VirtualTable("vtable", sql)
    # Set primary key.
    vtable.setPrimaryKeyColumns([geom_id_entity['ID']])
    # 'Metadatata' is an intentional typo: it mirrors the misspelled
    # GeoTools API method name, which GT needs to fix.
    vtable.addGeometryMetadatata(geom_entity['ID'], JPolygon, 4326)
    # Create feature source from virtual table.
    data_store.addVirtualTable(vtable)
    feature_source = data_store.getFeatureSource("vtable")
    # Add styling classes if there was a value entity.
    if data_entity:
        # Generate class bounds: open-ended classes below vmin and above
        # vmax plus num_classes equal-width classes in between.
        num_classes = data_entity.get('num_classes', 25)
        vmin = float(data_entity.get('min', 0))
        vmax = float(data_entity.get('max', 1))
        vrange = vmax - vmin
        class_width = vrange/num_classes
        classes = [(None, vmin)]
        for i in range(num_classes):
            classes.append((vmin + i * class_width, vmin + (i + 1) * class_width))
        classes.append((vmax, None))
        # Generate style rules for classes.
        rules = []
        for c in classes:
            rule = self.create_rule(c[0], c[1], vmin, vrange, attr=data_entity['ID'])
            rules.append(rule)
        feature_type_style = self.style_factory.createFeatureTypeStyle(rules)
        style = self.style_factory.createStyle()
        style.featureTypeStyles().add(feature_type_style)
    else:
        style = None
    # Setup map.
    gt_map = DefaultMapContext()
    gt_map.addLayer(feature_source, style)
    gt_renderer = StreamingRenderer()
    gt_renderer.setMapContent(gt_map)
    image_bounds = Rectangle(0, 0, map_parameters.get('WIDTH', 100),
                             map_parameters.get('HEIGHT', 100))
    # Set image type based on format: JPEG has no alpha channel.
    image_format = map_parameters.get('FORMAT', 'image/png')
    if image_format == 'image/jpeg':
        image_type = BufferedImage.TYPE_INT_RGB
    else:
        image_type = BufferedImage.TYPE_INT_ARGB
    buffered_image = BufferedImage(image_bounds.width, image_bounds.height, image_type)
    graphics = buffered_image.createGraphics()
    # Set background color if not transparent.
    if not map_parameters.get('TRANSPARENT'):
        graphics.setPaint(Color.WHITE)
        graphics.fill(image_bounds)
    crs = CRS.decode(map_parameters.get('SRS', "EPSG:4326"))
    bbox = map_parameters.get('BBOX', '-180,-90,180,90')
    coords = [float(coord) for coord in bbox.split(",")]
    map_bounds = ReferencedEnvelope(coords[0], coords[2], coords[1], coords[3], crs)
    gt_renderer.paint(graphics, image_bounds, map_bounds)
    # Release the JDBC connection and map content.
    data_store.dispose()
    gt_renderer.getMapContent().dispose()
    # Return raw image.
    byte_array_output_stream = ByteArrayOutputStream()
    informal_format = re.match('image/(.*)', image_format).group(1)
    ImageIO.write(buffered_image, informal_format, byte_array_output_stream)
    byte_array = byte_array_output_stream.toByteArray()
    raw_image = Py.newString(StringUtil.fromBytes(byte_array))
    return raw_image
def test_time(self):
    """Py.newTime must map java.sql.Time to datetime.time at both ends
    of the valid range."""
    for hms in ((0, 0, 0), (23, 59, 59)):
        self.assertEquals(datetime.time(*hms), Py.newTime(Time(*hms)))
def __call__(self, source, filename, symbol):
    """Compile `source` with the stored compiler flags."""
    compiled = Py.compile_flags(source, filename, symbol, self._cflags)
    return compiled