def patch_set(self, attribute, data):
    '''
    Set patch attributes from a Pandas dataframe

    :param attribute: valid NetLogo patch attribute
    :param data: Pandas dataframe with same dimensions as NetLogo world
    :raises: NetLogoException
    '''
    try:
        np.set_printoptions(threshold=np.prod(data.shape))
        datalist = '[' + str(data.as_matrix().flatten()).strip('[ ')
        datalist = ' '.join(datalist.split())

        if self.NL_VERSION == '6.0':
            command = ('(foreach map [[?1] -> [pxcor] of ?1] sort patches '
                       'map [[?2] -> [pycor] of ?2] sort patches {0} '
                       '[[?1 ?2 ?3] -> ask patch ?1 ?2 [set {1} ?3]])').format(datalist, attribute)
        else:
            command = ('(foreach map [[pxcor] of ?] sort patches '
                       'map [[pycor] of ?] sort patches {0} '
                       '[ask patch ?1 ?2 [set {1} ?3]])').format(datalist, attribute)

        self.link.command(command)
    except jpype.JException(jpype.java.org.nlogo.api.LogoException) as ex:
        raise NetLogoException(ex.message())
    except jpype.JException(jpype.java.org.nlogo.api.CompilerException) as ex:
        raise NetLogoException(ex.message())
    except jpype.JException(jpype.java.lang.Exception) as ex:
        raise NetLogoException(ex.message())

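# --- Usage sketch for patch_set()/patch_report() above (illustrative only):
# `netlogo` is assumed to be a pyNetLogo-style link object exposing those methods,
# and the world is assumed to span -20..20 in both directions.
import numpy as np
import pandas as pd

coords = range(-20, 21)
elevation = pd.DataFrame(np.random.rand(41, 41), index=coords, columns=coords)
# netlogo.patch_set('elevation', elevation)      # push dataframe values onto patches
# recovered = netlogo.patch_report('elevation')  # read them back into a dataframe
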
def load_from_html_cascade(filename, binary=False):
    try:
        result = load_from_html(filename, binary=binary)
    except jpype.JException(java.lang.StackOverflowError) as e:
        logging.warn('boilerpipe exception: {}'.format(filename))
        result = load_from_html(filename, use_boilerpipe=False, binary=binary)
    return result

def executeQuery(sql, **kwargs):
    connect, statement, resultSet = None, None, None
    try:
        if not sql or sql.isspace():
            print("executeQuery(): SQL is empty, please check")
            return
        connect = DataConn.getConnect()
        statement = connect.createStatement()
        if kwargs:
            sql = sql.format(**kwargs)
        logging.debug("executeQuery(): executing statement --> " + sql)
        resultSet = statement.executeQuery(sql)
        metaData = resultSet.getMetaData()
        columns = metaData.getColumnCount()
        resultList = []
        while resultSet.next():
            resultDict = {}
            for i in range(columns):
                columnName = metaData.getColumnName(i + 1)
                colValue = resultSet.getObject(i + 1)
                resultDict[columnName] = convent_data_type(colValue)
            resultList.append(resultDict)
        return resultList
    except jpype.JException as exception:
        logging.error(exception.stacktrace())
    finally:
        if resultSet:
            resultSet.close()
        if statement:
            statement.close()
        if connect:
            connect.close()

def _get_connection(self, server, user, password, service, jdbc_driver, tags):
    try:
        # Check if the instantclient is available
        cx_Oracle.clientversion()
    except cx_Oracle.DatabaseError as e:
        # Fallback to JDBC
        use_oracle_client = False
        self.log.debug('Oracle instant client unavailable, falling back to JDBC: {}'.format(e))
        connect_string = self.JDBC_CONNECT_STRING.format(server, service)
    else:
        use_oracle_client = True
        self.log.debug('Running cx_Oracle version {0}'.format(cx_Oracle.version))
        connect_string = self.CX_CONNECT_STRING.format(user, password, server, service)

    try:
        if use_oracle_client:
            con = cx_Oracle.connect(connect_string)
        else:
            try:
                if jpype.isJVMStarted() and not jpype.isThreadAttachedToJVM():
                    jpype.attachThreadToJVM()
                    jpype.java.lang.Thread.currentThread().setContextClassLoader(
                        jpype.java.lang.ClassLoader.getSystemClassLoader())
                con = jdb.connect(self.ORACLE_DRIVER_CLASS, connect_string,
                                  [user, password], jdbc_driver)
            except jpype.JException(jpype.java.lang.RuntimeException) as e:
                if "Class {} not found".format(self.ORACLE_DRIVER_CLASS) in str(e):
                    msg = """Cannot run the Oracle check until either the Oracle instant client or the JDBC Driver is available.
For the Oracle instant client, see:
http://www.oracle.com/technetwork/database/features/instant-client/index.html
You will also need to ensure the `LD_LIBRARY_PATH` is updated so the libs are reachable.
For the JDBC Driver, see:
http://www.oracle.com/technetwork/database/application-development/jdbc/downloads/index.html
You will also need to ensure the jar is either listed in your $CLASSPATH or in the yaml configuration file of the check.
"""
                    self.log.error(msg)
                raise

        self.log.debug("Connected to Oracle DB")
        self.service_check(self.SERVICE_CHECK_NAME, AgentCheck.OK, tags=tags)
    except Exception as e:
        self.service_check(self.SERVICE_CHECK_NAME, AgentCheck.CRITICAL, tags=tags)
        self.log.error(e)
        raise

    return con

def command(self, netlogo_command):
    '''
    Execute the supplied command in NetLogo

    :param netlogo_command: a string with a valid netlogo command
    :raises: NetLogoException in case of either a LogoException or
             CompilerException being raised by netlogo.
    '''
    try:
        self.link.command(netlogo_command)
    except jpype.JException(jpype.java.org.nlogo.api.LogoException) as ex:
        raise NetLogoException(ex.message())
    except jpype.JException(jpype.java.org.nlogo.api.CompilerException) as ex:
        raise NetLogoException(ex.message())
    except jpype.JException(jpype.java.org.nlogo.nvm.EngineException) as ex:
        raise NetLogoException(ex.message())

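# --- Usage sketch (illustrative only): `link` stands for any object exposing the
# command()/report() methods above (e.g. a pyNetLogo NetLogoLink); 'setup', 'go' and
# 'count turtles' are placeholder procedures/reporters, and NetLogoException is the
# wrapper exception raised by those methods.
def run_model_once(link, ticks=10):
    try:
        link.command('setup')
        link.command('repeat {0} [go]'.format(ticks))
        return link.report('count turtles')
    except NetLogoException as err:
        # LogoException and CompilerException both surface here as NetLogoException
        raise RuntimeError('NetLogo run failed: {0}'.format(err))
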
def _get_token_analysis(self, word_analyses, index):
    try:
        token_analysis = word_analyses[index]
    except jp.JException(self.index_out_of_bounds_exception):
        return None, None
    best_analysis = token_analysis.getBestAnalysis()
    token = token_analysis.getWordAnalysis().getInput()
    return token, best_analysis

def patch_report(self, netlogo_reporter):
    '''
    Report a Pandas dataframe with the same shape as the NetLogo world, with
    column labels and row indices following pxcor and pycor patch coordinates.
    The values of the dataframe correspond to patch attributes.

    :param netlogo_reporter: valid patch attribute
    :raises: NetLogoException
    :returns: Dataframe containing NetLogo patch attributes
    '''
    try:
        extents = self.link.report('(list min-pxcor max-pxcor min-pycor max-pycor)')
        extents = self._cast_results(extents).astype(int)

        results_df = pd.DataFrame(index=range(extents[2], extents[3] + 1, 1),
                                  columns=range(extents[0], extents[1] + 1, 1))
        results_df.sort_index(ascending=False, inplace=True)

        if self.NL_VERSION == '6.0':
            resultsvec = self.link.report(
                'map [[?1] -> [{0}] of ?1] sort patches'.format(netlogo_reporter))
        else:
            resultsvec = self.link.report(
                'map [[{0}] of ?] sort patches'.format(netlogo_reporter))
        resultsvec = self._cast_results(resultsvec)
        results_df.ix[:, :] = resultsvec.reshape(results_df.shape)

        return results_df
    except jpype.JException(jpype.java.org.nlogo.api.LogoException) as ex:
        raise NetLogoException(ex.message())
    except jpype.JException(jpype.java.org.nlogo.api.CompilerException) as ex:
        raise NetLogoException(ex.message())
    except jpype.JException(jpype.java.lang.Exception) as ex:
        raise NetLogoException(ex.message())

def report(self, netlogo_reporter):
    '''
    Every reporter (commands which return a value) that can be called in the
    NetLogo Command Center can be called with this method.

    :param netlogo_reporter: a valid netlogo reporter
    :raises: NetLogoException
    '''
    try:
        result = self.link.report(netlogo_reporter)
        return self._cast_results(result)
    except jpype.JException(jpype.java.org.nlogo.api.LogoException) as ex:
        raise NetLogoException(ex.message())
    except jpype.JException(jpype.java.org.nlogo.api.CompilerException) as ex:
        raise NetLogoException(ex.message())
    except jpype.JException(jpype.java.lang.Exception) as ex:
        raise NetLogoException(ex.message())

def repeat_report(self, netlogo_reporter, reps):
    '''
    Execute a reporter (or multiple reporters if a list of strings is given as
    input) over a number of ticks.

    :param netlogo_reporter: valid NetLogo reporters (string or list of strings)
    :param reps: number of ticks for which to return values
    :raises: NetLogoException
    :returns: Dataframe of reported values indexed by ticks, with columns
              for each reporter.
    '''
    if isinstance(netlogo_reporter, str):
        cols = [netlogo_reporter]
    elif isinstance(netlogo_reporter, list):
        cols = netlogo_reporter
    else:
        raise NetLogoException("Unknown datatype")

    results_df = pd.DataFrame(columns=cols)

    for _ in range(reps):
        for reporter in results_df.columns:
            try:
                tick = self._cast_results(self.link.report('ticks'))
                result = self.link.report(reporter)
                results_df.loc[tick, reporter] = self._cast_results(result)
            except jpype.JException(jpype.java.org.nlogo.api.LogoException) as ex:
                raise NetLogoException(ex.message())
            except jpype.JException(jpype.java.org.nlogo.api.CompilerException) as ex:
                raise NetLogoException(ex.message())
            except jpype.JException(jpype.java.lang.Exception) as ex:
                raise NetLogoException(ex.message())

        self.link.command('go')

    return results_df

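# --- Usage sketch for repeat_report() above (illustrative only): the reporter names
# assume a Wolf Sheep Predation-style model has already been loaded on `link`.
def sample_populations(link, reps=100):
    # Returns a tick-indexed DataFrame with one column per reporter.
    populations = link.repeat_report(['count sheep', 'count wolves'], reps)
    return populations.astype(float)
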
def repeat_command(self, netlogo_command, reps):
    '''
    Execute the supplied command in NetLogo a given number of times

    :param netlogo_command: a string with a valid NetLogo command
    :param reps: number of times to repeat commands
    :raises: NetLogoException in case of either a LogoException or
             CompilerException being raised by NetLogo.
    '''
    try:
        commandstr = 'repeat {0} [{1}]'.format(reps, netlogo_command)
        self.link.command(commandstr)
    except jpype.JException(jpype.java.org.nlogo.api.LogoException) as ex:
        raise NetLogoException(ex.message())
    except jpype.JException(jpype.java.org.nlogo.api.CompilerException) as ex:
        raise NetLogoException(ex.message())
    except jpype.JException(jpype.java.org.nlogo.nvm.EngineException) as ex:
        raise NetLogoException(ex.message())

def load_model(self, path):
    '''
    Load a NetLogo model.

    :param path: the absolute path to the NetLogo model
    :raise: IOError in case the model is not found
    :raise: NetLogoException wrapped around NetLogo exceptions.
    '''
    try:
        self.link.loadModel(path)
    except jpype.JException(jpype.java.io.IOException) as ex:
        raise IOError(ex.message())
    except jpype.JException(jpype.java.org.nlogo.api.LogoException) as ex:
        raise NetLogoException(ex.message())
    except jpype.JException(jpype.java.org.nlogo.api.CompilerException) as ex:
        raise NetLogoException(ex.message())
    except jpype.JException(jpype.java.lang.InterruptedException) as ex:
        raise NetLogoException(ex.message())
    else:
        debug('loaded model successfully')

def load_model(self, path):
    '''
    Load a NetLogo model.

    :param path: the absolute path to the NetLogo model
    :raise: IOError in case the model is not found
    :raise: NetLogoException wrapped around NetLogo exceptions.
    '''
    if not os.path.isfile(path):
        raise EMAError('{} is not a file'.format(path))

    try:
        self.link.loadModel(path)
    except jpype.JException(jpype.java.io.IOException) as ex:
        raise IOError(ex.message())
    except jpype.JException(jpype.java.org.nlogo.api.LogoException) as ex:
        raise NetLogoException(ex.message())
    except jpype.JException(jpype.java.org.nlogo.api.CompilerException) as ex:
        raise NetLogoException(ex.message())
    except jpype.JException(jpype.java.lang.InterruptedException) as ex:
        raise NetLogoException(ex.message())

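# --- Usage sketch for load_model() above (illustrative only): 'model.nlogo' is a
# placeholder filename and `link` exposes the load_model()/command() methods above.
import os

def load_and_setup(link, path='model.nlogo'):
    link.load_model(os.path.abspath(path))  # load_model expects an absolute path
    link.command('setup')
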
def executeByConn(connect, sql, kwargs):
    statement = None
    try:
        if not sql or sql.isspace():
            logging.debug("execute(): SQL is empty, please check")
            return
        if ";" in sql:
            logging.debug("execute(): SQL contains a ';', please check")
            return
        statement = connect.createStatement()
        if kwargs:
            sql = sql.format(**kwargs)
        logging.debug("execute(): executing statement --> " + sql)
        resultSet = statement.executeUpdate(sql)
        return resultSet
    except jpype.JException as exception:
        logging.error(exception.stacktrace())
    finally:
        if statement:
            statement.close()

def execute(sql, **kwargs):
    connect, statement = None, None
    try:
        if not sql or sql.isspace():
            print("execute(): SQL is empty, please check")
            return
        if ";" in sql:
            return DataConn.executeBatch(sql, **kwargs)
        connect = DataConn.getConnect()
        statement = connect.createStatement()
        if kwargs:
            sql = sql.format(**kwargs)
        logging.debug("execute(): executing statement --> " + sql)
        resultSet = statement.executeUpdate(sql)
        return resultSet
    except jpype.JException as exception:
        logging.error(exception.stacktrace())
    finally:
        if statement:
            statement.close()
        if connect:
            connect.close()

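# --- Usage sketch for execute()/executeQuery() above (illustrative only): the
# 'users' table and its columns are placeholders. Note that kwargs are spliced into
# the SQL with str.format rather than bound as JDBC parameters, so only pass values
# that are already trusted.
def example_user_update(uid, new_name):
    execute("UPDATE users SET name = '{name}' WHERE id = {uid}", name=new_name, uid=uid)
    return executeQuery("SELECT id, name FROM users WHERE id = {uid}", uid=uid)
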
def __outputTrees(parseTrees, outputData, parser, opts, line, sentenceID):
    '''
    Write tree information to outputData.
    This is a reimplementation of the private method of the same name
    from BerkeleyParser.java.
    '''
    # todo cleanup?
    delimiter = "\t"
    if opts.ec_format:
        newList = []
        for parsedTree in parseTrees:
            if parsedTree.getChildren().isEmpty():
                continue
            if parser.getLogLikelihood(parsedTree) != float("-inf"):
                newList.append(parsedTree)
        parseTrees = newList
        outputData.write("%s\t%s\n" % (len(parseTrees), sentenceID))
        delimiter = ",\t"

    for parsedTree in parseTrees:
        addDelimiter = False
        if opts.tree_likelihood:
            treeLL = float("-inf") if parsedTree.getChildren().isEmpty() \
                else parser.getLogLikelihood(parsedTree)
            if treeLL == float("-inf"):
                continue
            outputData.write("%s" % treeLL)
            addDelimiter = True
        if opts.sentence_likelihood:
            allLL = float("-inf") if parsedTree.getChildren().isEmpty() \
                else parser.getLogLikelihood()
            if addDelimiter:
                outputData.write(delimiter)
            addDelimiter = True
            if opts.ec_format:
                outputData.write("sentenceLikelihood ")
            outputData.write("%s" % allLL)
        if not opts.binarize:
            TreeAnnotations = jpype.JClass("edu.berkeley.nlp.PCFGLA.TreeAnnotations")
            parsedTree = TreeAnnotations.unAnnotateTree(parsedTree, opts.keepFunctionLabels)
        if opts.confidence:
            treeLL = float("-inf") if parsedTree.getChildren().isEmpty() \
                else parser.getConfidence(parsedTree)
            if addDelimiter:
                outputData.write(delimiter)
            addDelimiter = True
            if opts.ec_format:
                outputData.write("confidence ")
            outputData.write("%s" % treeLL)
        elif opts.modelScore:
            score = float("-inf") if parsedTree.getChildren().isEmpty() \
                else parser.getModelScore(parsedTree)
            if addDelimiter:
                outputData.write(delimiter)
            addDelimiter = True
            if opts.ec_format:
                outputData.write("maxRuleScore ")
            outputData.write("%.8f" % score)

        if opts.ec_format:
            outputData.write("\n")
        elif addDelimiter:
            outputData.write(delimiter)

        if not parsedTree.getChildren().isEmpty():
            treeString = parsedTree.getChildren().get(0).toString()
            if len(parsedTree.getChildren()) != 1:
                sys.stderr.write("ROOT has more than one child!")
                parsedTree.setLabel("")
                treeString = parsedTree.toString()
            if opts.ec_format:
                outputData.write("(S1 " + treeString + " )\n")
            else:
                outputData.write("( " + treeString + " )\n")
        else:
            outputData.write("(())\n")

        if opts.render:
            try:
                writeTreeToImage(parsedTree, re.sub("[^a-zA-Z]", "", line) + ".png")
            except jpype.JException(java.lang.RuntimeException), ex:
                # todo actually test this exception handling
                print "Caught the runtime exception : ", ex.message()
                print ex.stacktrace()

def _get_next_word(self, word_list, index):
    try:
        return word_list[index]
    except jp.JException(self.index_out_of_bounds_exception):
        return None

def initialize(classpath, parameters):
    global INCOMING, OUTGOING, BOTH,\
        BREADTH_FIRST, DEPTH_FIRST,\
        NotFoundException, NotInTransactionException, DeadlockDetectedException,\
        RelationshipType, Evaluator, IndexService, FulltextIndexService,\
        FulltextQueryIndexService, SORT_RELEVANCE, SORT_INDEXORDER,\
        ALL, ALL_BUT_START_NODE, END_OF_GRAPH, StopAtDepth,\
        array, to_java, to_python, tx_join, make_map,\
        Node, Relationship, NativeRelType

    jvm = parameters.pop('jvm', None)
    if jvm is None:
        jvm = jpype.getDefaultJVMPath()

    args = []
    if 'ext_dirs' in parameters:
        args.append('-Djava.ext.dirs=' + ':'.join(parameters['ext_dirs']))
    args.append('-Djava.class.path=' + ':'.join(classpath))

    heap_size = parameters.pop('heap_size', None)
    if heap_size is not None:
        if is_integer(heap_size) or heap_size[-1].isdigit():
            heap_size = '%sM' % heap_size  # default to megabyte
        args.append('-Xmx' + heap_size)

    jpype.startJVM(jvm, *args)

    apache_sort = jpype.JPackage('org').apache.lucene.search.Sort
    SORT_INDEXORDER = apache_sort.INDEXORDER
    SORT_RELEVANCE = apache_sort.RELEVANCE

    core = jpype.JPackage('org').neo4j.graphdb
    kernel_impl = jpype.JPackage('org').neo4j.kernel.impl

    INCOMING = core.Direction.INCOMING
    OUTGOING = core.Direction.OUTGOING
    BOTH = core.Direction.BOTH

    Order = getattr(core, 'Traverser$Order')
    Stop = core.StopEvaluator
    Returnable = core.ReturnableEvaluator

    BREADTH_FIRST = Order.BREADTH_FIRST
    DEPTH_FIRST = Order.DEPTH_FIRST
    ALL = Returnable.ALL
    ALL_BUT_START_NODE = Returnable.ALL_BUT_START_NODE
    END_OF_GRAPH = Stop.END_OF_GRAPH

    NotFoundException = jpype.JException(core.NotFoundException)
    NotInTransactionException = jpype.JException(core.NotInTransactionException)
    DeadlockDetectedException = jpype.JException(
        kernel_impl.transaction.DeadlockDetectedException)

    Node = core.Node
    Relationship = core.Relationship
    NativeRelType = core.RelationshipType

    try:
        EmbeddedGraphDb = jpype.JClass("org.neo4j.kernel.EmbeddedGraphDatabase")
    except:
        EmbeddedGraphDb = None
    try:
        RemoteGraphDb = jpype.JClass("org.neo4j.remote.RemoteGraphDatabase")
    except:
        RemoteGraphDb = None
    try:
        FulltextIndexService = jpype.JClass(
            "org.neo4j.index.lucene.LuceneFulltextIndexService")
    except:
        FulltextIndexService = None
    try:
        FulltextQueryIndexService = jpype.JClass(
            "org.neo4j.index.lucene.LuceneFulltextQueryIndexService")
    except:
        FulltextQueryIndexService = None
    try:
        IndexService = jpype.JClass("org.neo4j.index.lucene.LuceneIndexService")
    except:
        try:
            IndexService = jpype.JClass("org.neo4j.index.NeoIndexService")
        except:
            IndexService = None

    HashMap = jpype.java.util.HashMap

    def make_map(d):
        result = HashMap()
        for key, value in d.items():
            result.put(key, value)
        return result

    def tx_join():
        if not jpype.isThreadAttachedToJVM():
            jpype.attachThreadToJVM()

    def array(lst):
        return lst

    def to_java(obj):
        return obj

    def to_python(obj):
        return obj

    rel_types = {}

    def RelationshipType(name):
        if name in rel_types:
            return rel_types[name]
        else:
            rel_types[name] = type = jpype.JProxy(core.RelationshipType, dict={
                'name': lambda: name
            })
            return type

    def StopAtDepth(limit):
        limit = int(limit)
        assert limit > 0, "Illegal stop depth."
        if limit == 1:
            return core.StopEvaluator.DEPTH_ONE
        else:
            return jpype.JProxy(Stop, dict={
                'isStopNode': lambda pos: limit <= pos.depth()
            })

    class Evaluator(object):
        def __init__(self):
            self.stop = jpype.JProxy(Stop, inst=self)
            self.returnable = jpype.JProxy(Returnable, inst=self)

    return EmbeddedGraphDb, RemoteGraphDb

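# --- Usage sketch for initialize() above (illustrative only): the jar paths, store
# directory and heap size are placeholders, and EmbeddedGraphDatabase is assumed to
# take the store directory as its constructor argument.
def open_embedded_db(jars, store_dir, heap_mb='512'):
    EmbeddedGraphDb, _ = initialize(classpath=jars, parameters={'heap_size': heap_mb})
    if EmbeddedGraphDb is None:
        raise RuntimeError('neo4j kernel jar not found on the classpath')
    return EmbeddedGraphDb(store_dir)
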
# Possibly change this for your system
dll = {
    'linux': '/jre/lib/amd64/server/libjvm.so',
    'darwin': '/jre/lib/server/libjvm.dylib',
    'win32': '/jre/bin/server/jvm.dll'
}

try:
    import jpype, os  # @UnresolvedImport
    jpype.startJVM(os.environ['JAVA_HOME'] + dll[sys.platform],
                   '-ea', '-Xmx5g',
                   '-Djava.class.path=lib/compling.core.jar')
    compling = jpype.JPackage('compling')
    SlotChain = getattr(compling.grammar.unificationgrammar, 'UnificationGrammar$SlotChain')
    getParses = compling.gui.util.Utils.getParses
    ParserException = jpype.JException(compling.parser.ParserException)  # @UnusedVariable
    ECGAnalyzer = compling.parser.ecgparser.ECGAnalyzer
    getDfs = compling.grammar.unificationgrammar.FeatureStructureUtilities.getDfs  # @UnusedVariable
except ImportError:
    from compling.grammar.unificationgrammar.UnificationGrammar import SlotChain
    from compling.gui.util.Utils import getParses
    from compling.parser import ParserException  # @UnusedImport
    from compling.parser.ecgparser import ECGAnalyzer
    from compling.grammar.unificationgrammar.FeatureStructureUtilities import getDfs  # @UnusedImport


class Analyzer(object):
    def __init__(self, prefs):
        self.analyzer = ECGAnalyzer(prefs)
        self.grammar = self.analyzer.grammar
        self.server = None