def ig_ref_search_2file(filename, line, col, toplevelpath, destinationfile):
    """Find the IG item at filename:line,col and dump all its references to destinationfile.

    :param filename: source file to search in
    :param line: 1-based line of the cursor position
    :param col: column of the cursor position
    :param toplevelpath: string path of the toplevel to resolve against
    :param destinationfile: file that receives the reference dump
    """
    sf = SourceFile(File(filename))
    tlp = ToplevelPath(toplevelpath)
    location = SourceLocation(sf, line, col)
    start = System.currentTimeMillis()
    res = SourceLocation2IG.findNearestItem(location, tlp, project)
    # `is None` is the correct identity test (the original used `== None`).
    if res is None:
        printf('No IG item found at %s:%s,%s', filename, line, col)
        return
    item = res.getFirst()
    if item is None:
        printf('Failed to find nearest IG Item')
        return
    path = res.getSecond()
    rs = IGReferencesSearch(project)
    if isinstance(item, IGOperationObject):
        item = item.getObject()
    result = rs.search(item, path, True, True, False, False)
    time = System.currentTimeMillis() - start
    out = PrintStream(File(destinationfile))
    try:
        printf('Found %s references of %s in %s ms.', result.countRefs(), item, time)
        out.printf('Found %s references of %s in %s ms:\n', result.countRefs(), item, time)
        result.dump(0, out)
    finally:
        # The original leaked the PrintStream; close it so the dump is flushed.
        out.close()
def client_run(self):
    """Interactive client loop: forward console lines (self.Sc) to the
    socket and print each reply received on it."""
    sock_reader = Scanner(self.s.getInputStream())
    sock_writer = PS(self.s.getOutputStream())
    # Initial handshake: one line out, two lines back.
    sock_writer.println(self.Sc.nextLine())
    print(sock_reader.nextLine())
    print(sock_reader.nextLine())
    # Then a strict send/receive alternation, forever.
    while True:
        sock_writer.println(self.Sc.nextLine())
        print(sock_reader.nextLine())
def run(self):
    """Server-side echo loop: greet the client, then acknowledge every
    line it sends by echoing it back with a suffix."""
    self.cs_IS = Scanner(self.s.getInputStream())
    self.cs_OS = PrintStream(self.s.getOutputStream())
    self.cs_OS.println("breaches are open...")
    received = self.cs_IS.nextLine()
    self.cs_OS.println(received + " server received")
    while True:
        received = self.cs_IS.nextLine()
        self.cs_OS.println(received + " server received")
def __init__(self, stdout=True):
    """Begin capturing a JVM standard stream into an in-memory buffer.

    :param stdout: capture System.out when True, System.err otherwise
    """
    if stdout:
        self._original, self._set_stream = System.out, System.setOut
    else:
        self._original, self._set_stream = System.err, System.setErr
    self._bytes = ByteArrayOutputStream()
    # autoflush off; decode as UTF-8 when the bytes are read back.
    self._stream = PrintStream(self._bytes, False, 'UTF-8')
    self._set_stream(self._stream)
def compile(wd,args): print "changing working directory to %s" % wd System.setProperty("user.dir",wd); print args outstream = ByteArrayOutputStream() errstream = ByteArrayOutputStream() System.setOut(PrintStream(outstream)) System.setErr(PrintStream(errstream)) try: Tom.exec(config + args) except: pass return str((outstream.toString(),errstream.toString()))
def run(self):
    """Broadcast a message to every live registered socket, pruning
    sockets that are closed or disconnected, then sleep 5 seconds."""
    print("broad time...")
    if len(self.reg_obj_ip) > reg.i:
        reg.i += 5
        # Iterate a snapshot: the original removed from the same list it was
        # iterating, which silently skips the element after each removal.
        for k in list(self.reg_obj_ip):
            if k.isConnected() and not (k.isClosed()):
                K = PrintStream(k.getOutputStream())
                K.println("BroadCast Message....")
            else:
                self.reg_obj_ip.remove(k)
    else:
        print("here")
    Thread.sleep(5000)
def astral_tree(args):
    """Run ASTRAL (phylonet) on one input file inside a fresh JVM.

    :param args: 3-tuple (path to ASTRAL jar, input file, output file)
    """
    import os
    astral, input_file, output_file = args
    print('ASTRAL processing %s has begun' % input_file)
    jpype.startJVM(jpype.getDefaultJVMPath(), '-Djava.class.path=%s' % astral, convertStrings=False)
    from java.lang import System
    from java.io import PrintStream, File
    # Silence ASTRAL's console chatter. os.devnull is portable ('NUL' on
    # Windows); the original hard-coded '/dev/null'.
    System.setOut(PrintStream(File(os.devnull)))
    System.setErr(PrintStream(File(os.devnull)))
    jpype.imports.registerDomain('phylonet')
    from phylonet.coalescent import CommandLine
    CommandLine.main(['-i', input_file, '-o', output_file])
    jpype.shutdownJVM()
def createOutput(outfile): try: fout = FileOutputStream(outfile) return PrintStream(fout) except IOException: print "Error opening output file" return None
def test_faulty_callback(self):
    """A raising callback must not prevent later callbacks from running.

    Java stderr is swallowed for the duration so the faulty callbacks'
    tracebacks do not pollute the test output.
    """
    import java.lang.System as Sys
    import java.io.PrintStream as PrintStream
    import java.io.OutputStream as OutputStream

    class NullOutputStream(OutputStream):
        # One *args method covers both Java overloads, write(int) and
        # write(byte[], int, int). The original defined `write` twice;
        # the second def silently replaced the first.
        def write(self, *args):
            pass

    syserr = Sys.err
    Sys.setErr(PrintStream(NullOutputStream()))
    tools.register(self.faulty_callback)
    tools.register(self.assert_callback)
    tools.register(self.faulty_callback2)
    self.count = 0
    try:
        eval("42+1")
    finally:
        self.assertTrue(tools.unregister(self.faulty_callback))
        self.assertTrue(tools.unregister(self.faulty_callback2))
        self.assertTrue(tools.unregister(self.assert_callback))
        Sys.setErr(syserr)
    self.assertEqual(self.count, 1)
def __init__(self, jsrc_path):
    """
    :param jsrc_path: (str) Path that contains compiled reeb_graph java project
                      (https://github.com/dbespalov/reeb_graph)
    """
    import tempfile
    self.jsrc_path = jsrc_path
    if not jpype.isJVMStarted():
        jpype.startJVM(classpath=[jsrc_path], convertStrings=True)
    elif not jpype.isThreadAttachedToJVM():
        jpype.attachThreadToJVM()
    # These imports are activated by jpype after starting the JVM
    from java.lang import System
    from java.io import PrintStream, File
    # Disable java output. os.devnull is NUL on Windows, /dev/null on unix
    # (the original hard-coded '/dev/null' despite its own comment).
    System.setOut(PrintStream(File(os.devnull)))
    self.erg = jpype.JClass('ExtractReebGraph')()
    self.crg = jpype.JClass('CompareReebGraph')()
    # Set defaults
    self.params = ['4000', '0.005', str(2**7), '0.5']
    # the reeb project tries to save a file in the working directory
    # -> redirect to the system temp dir briefly, restoring cwd even on error
    wd = Path.cwd()
    os.chdir(tempfile.gettempdir())
    try:
        self.erg.main(self.params[:3])
        self.crg.main(self.params)
        try:
            (Path.cwd() / 'log_{}_{}_{}_{}'.format(*self.params)).unlink()
        except FileNotFoundError:
            pass
    finally:
        os.chdir(str(wd))
def __check_update_require_domain_restart(model_context):
    """Decide whether the pending WLST changes require a domain restart.

    Captures the output of isRestartRequired() into a string stream, then
    either cancels the edit (rollback mode) or saves and activates it.

    :param model_context: supplies the rollback-if-restart-required setting
    :raises BundleAwareException: re-raised after releasing the edit session
    """
    exit_code = 0
    try:
        # First we enable the stdout again and then redirect the stdoout to a string output stream
        # call isRestartRequired to get the output, capture the string and then silence wlst output again
        # __wlst_helper.enable_stdout()
        sostream = StringOutputStream()
        System.setOut(PrintStream(sostream))
        restart_required = __wlst_helper.is_restart_required()
        is_restartreq_output = sostream.get_string()
        __wlst_helper.silence()
        if model_context.is_rollback_if_restart_required() and restart_required:
            # Rollback path: abandon the edit and report what forced the restart.
            __wlst_helper.cancel_edit()
            __logger.severe('WLSDPLY_09015', is_restartreq_output)
            exit_code = CommandLineArgUtil.PROG_ROLLBACK_IF_RESTART_EXIT_CODE
        else:
            # Commit path: save and activate; flag the restart if one is needed.
            __wlst_helper.save()
            __wlst_helper.activate()
            if restart_required:
                exit_code = CommandLineArgUtil.PROG_RESTART_REQUIRED
    except BundleAwareException, ex:
        __release_edit_session_and_disconnect()
        raise ex
def printError(self, e):
    """Append exception `e` to the output area in the error color.

    Only Jython-level PyExceptions are handled; their Java stack trace is
    captured into a byte buffer and appended without its trailing newline.
    """
    from org.python.core import PyException
    from java.io import ByteArrayOutputStream, PrintStream
    import traceback  # NOTE(review): unused here — possibly needed elsewhere
    if isinstance(e, PyException):
        # NOTE(review): presumably suppresses writing the failed edit back — confirm
        self.reflectChanges = 0
        out = ByteArrayOutputStream()
        e.printStackTrace(PrintStream(out))
        # [:-1] strips the trailing newline printStackTrace always emits.
        self.addOutput(self.errorColor, u"\n%s" % unicode(out)[:-1])
class JavaCapturer(object):
    """Redirects one JVM standard stream into an in-memory buffer until released."""

    def __init__(self, stdout=True):
        """Start capturing System.out (default) or System.err."""
        if stdout:
            self._original, self._set_stream = System.out, System.setOut
        else:
            self._original, self._set_stream = System.err, System.setErr
        self._bytes = ByteArrayOutputStream()
        self._stream = PrintStream(self._bytes, False, 'UTF-8')
        self._set_stream(self._stream)

    def release(self):
        """Stop capturing and return everything captured so far."""
        # Original stream must be restored before closing the current
        self._set_stream(self._original)
        self._stream.close()
        captured = self._bytes.toString('UTF-8')
        self._bytes.reset()
        return captured
class JavaCapturer(object):
    """Captures a chosen JVM standard stream (out or err) into a byte buffer."""

    def __init__(self, stdout=True):
        """Swap the selected System stream for a buffering PrintStream."""
        self._original = System.out if stdout else System.err
        self._set_stream = System.setOut if stdout else System.setErr
        self._bytes = ByteArrayOutputStream()
        self._stream = PrintStream(self._bytes, False, "UTF-8")
        self._set_stream(self._stream)

    def release(self):
        """Restore the real stream and hand back the captured text."""
        # Original stream must be restored before closing the current
        self._set_stream(self._original)
        self._stream.close()
        output = self._bytes.toString("UTF-8")
        self._bytes.reset()
        return output
def __init__(self, *args, **kwargs):
    """Boot the JVM, funnel Java stdout/stderr into one buffer, and open a VTL session."""
    super(VTLKernel, self).__init__(*args, **kwargs)
    startJVM()
    from java.lang import System
    from java.io import ByteArrayOutputStream
    from java.io import PrintStream
    self.baos = ByteArrayOutputStream()
    sink = PrintStream(self.baos)
    # Both JVM streams share the same buffer, read back by the kernel later.
    System.setOut(sink)
    System.setErr(sink)
    it = JPackage('it')
    self.VTLSession = it.bancaditalia.oss.vtl.config.ConfigurationManager.getDefault().createSession()
def _load_tree(self):
    """Load the tree via self.ldr with JVM stderr suppressed for the duration."""
    # capture all error messages
    from java.lang import System  #@UnresolvedImport
    from java.io import PrintStream  #@UnresolvedImport
    saved_err = System.err
    System.setErr(PrintStream(_NoOutputStream()))  #@UndefinedVariable
    try:
        return self.ldr.loadTree()
    finally:
        System.setErr(saved_err)  #@UndefinedVariable
def run(self):
    """Send self.li over the socket, then print every reply line until the
    stream ends, closing writer, reader and socket afterwards."""
    writer = PrintStream(self.so.getOutputStream())
    writer.println(self.li)
    print("writting complete")
    reader = Scanner(self.so.getInputStream())
    while reader.hasNext():
        print(reader.nextLine())
    writer.close()
    reader.close()
    self.so.close()
def _runCommand(cmd, runDir=None):
    """Run shell-style command string `cmd`, optionally from `runDir`.

    Java stderr is captured while the process is spawned; a non-zero exit
    status fails with the captured text included.

    :param cmd: command line to run (split with shlex)
    :param runDir: directory to run from; the previous cwd is restored
    :return: the process's stdout
    """
    if runDir is not None:
        currWorkingDir = os.getcwd()
        os.chdir(runDir)
    try:
        cmd = shlex.split(cmd)
        bosErr = ByteArrayOutputStream()
        origStdErr = System.err
        System.setErr(PrintStream(bosErr))
        try:
            process = subprocess.Popen(cmd, stdout=subprocess.PIPE)
        finally:
            # Restore stderr even if Popen raises; the original leaked the
            # redirection (and the cwd change) on failure.
            System.setErr(origStdErr)
    finally:
        if runDir is not None:
            os.chdir(currWorkingDir)
    if process.wait() != 0:
        errorStr = "Failed to call '%s'. Error: %s" % (
            cmd, String(bosErr.toByteArray()))
        _fail(errorStr)
    return process.communicate()[0]
def load(self, warn=True):
    """Load the tree via self.ldr (JVM stderr silenced) and convert it to
    this module's own node structure.

    :param warn: NOTE(review): currently unused in this body — confirm intent
    :raises io_exception: if no tree could be loaded
    :return: the converted root node (via _copynode)
    """
    # capture all error messages
    from java.lang import System  #@UnresolvedImport
    from java.io import PrintStream  #@UnresolvedImport
    oldErr = System.err
    System.setErr(PrintStream(_NoOutputStream()))  #@UndefinedVariable
    tree = None
    try:
        tree = self.ldr.loadTree()
    finally:
        System.setErr(oldErr)  #@UndefinedVariable
    if tree is None:
        raise io_exception, "No tree found"
    # convert tree to own tree
    r = tree.getNodeLink()
    pool = dict()
    return self._copynode(pool, r)
class multi_client(Thread):
    """Per-connection server thread: greets the client, then echoes every
    received line back with an acknowledgement suffix."""

    def __init__(self, s):
        # The original never initialised the Thread base class; without this,
        # start() breaks on Python's threading.Thread and base-class state is
        # left unset in general.
        Thread.__init__(self)
        self.s = s          # connected client socket
        self.cs_IS = None   # Scanner over the socket input, set in run()
        self.cs_OS = None   # PrintStream over the socket output, set in run()

    def run(self):
        """Acknowledge every line received on the socket, forever."""
        self.cs_IS = Scanner(self.s.getInputStream())
        self.cs_OS = PrintStream(self.s.getOutputStream())
        self.cs_OS.println("breaches are open...")
        x = self.cs_IS.nextLine()
        self.cs_OS.println(x + " server received")
        while True:
            x = self.cs_IS.nextLine()
            self.cs_OS.println(x + " server received")
def alt_handle(self):
    """Read one line of MARC XML from the request, convert it to RDF/XML
    with Saxon, and write the result to the response stream."""
    try:
        raw_xml = self.rfile.readline().strip()
        marc_xmlfile = NamedTemporaryFile(delete=False)
        marc_xmlfile.write(raw_xml)
        marc_xmlfile.close()
        # (the original also computed an unused `base_uri` local here)
        args = [INFO.get('saxon_xqy'),
                'marcxmluri={}'.format(
                    os.path.normpath(marc_xmlfile.name).replace("\\", "/")),
                'baseuri={}'.format(INFO.get('base_uri', 'http://catalog/')),
                'serialization=rdfxml']
        query = saxon.Query()
        output_stream = ByteArrayOutputStream()
        # Saxon writes its result on stdout; capture it, and restore the real
        # stdout afterwards (the original left it redirected permanently).
        original_out = System.out
        System.setOut(PrintStream(output_stream))
        try:
            query.main(args)
        finally:
            System.setOut(original_out)
        self.wfile.write(output_stream.toString().encode('ascii', errors='ignore'))
        os.remove(marc_xmlfile.name)
    except Exception:
        # Best-effort error reporting back to the client.
        self.wfile.write("Error processing MARC XML:\n\t{}".format(sys.exc_info()[0]))
def __init__(self, max_iterations=2, grammar='grammar.yaml'):
    """Start the JVM with the bundled MCTS jar, redirect Java stderr to a
    log file, and construct the search algorithm."""
    super().__init__()
    jar_path = Path(__file__).parent / "test_starlib_mcts-1.0-SNAPSHOT-all.jar"
    # noinspection PyUnresolvedReferences
    # startJVM is the right function
    jpype.startJVM(classpath=[str(jar_path)])
    jpype.imports.registerDomain("isml.aidev")
    # noinspection PyUnresolvedReferences
    from java.lang import System
    # noinspection PyUnresolvedReferences
    from java.io import PrintStream, File
    # Java stderr chatter goes to a log file instead of the console.
    # noinspection PyUnresolvedReferences
    System.setErr(PrintStream(File("ailibs.log")))
    # noinspection PyUnresolvedReferences
    from isml.aidev import Algorithm
    self.algo = Algorithm(max_iterations, grammar)
    self._initialized = True
def online_check_save_activate(model_context):
    """
    For online update and deploy, check if restart is required, then cancel or save and activate.
    :param model_context: used to perform checks
    :return: the exit code for the tool
    :raises BundleAwareException: if an error occurs during the process
    """
    _method_name = 'online_check_save_activate'
    exit_code = 0
    try:
        # First we enable the stdout again and then redirect the stdoout to a string output stream
        # call isRestartRequired to get the output, capture the string and then silence wlst output again
        _wlst_helper.enable_stdout()
        sostream = StringOutputStream()
        System.setOut(PrintStream(sostream))
        restart_required = _wlst_helper.is_restart_required()
        is_restartreq_output = sostream.get_string()
        _wlst_helper.silence()
        if model_context.is_cancel_changes_if_restart_required() and restart_required:
            # Cancel path: abandon the edit and report which changes were not dynamic.
            _wlst_helper.cancel_edit()
            _logger.warning('WLSDPLY_09015', is_restartreq_output)
            exit_code = CommandLineArgUtil.PROG_CANCEL_CHANGES_IF_RESTART_EXIT_CODE
            list_non_dynamic_changes(model_context, is_restartreq_output)
        else:
            # Commit path: save and activate, then fold any required restarts
            # into the exit code.
            _wlst_helper.save()
            _wlst_helper.activate(model_context.get_model_config().get_activate_timeout())
            if restart_required:
                exit_code = CommandLineArgUtil.PROG_RESTART_REQUIRED
                list_non_dynamic_changes(model_context, is_restartreq_output)
            exit_code = list_restarts(model_context, exit_code)
    except BundleAwareException, ex:
        release_edit_session_and_disconnect()
        raise ex
def dump_patterns(inputs, outputs, outfile, calibration_percentage=0.75, skip=365, append=0, fast_write=0):
    '''
    dump_patterns(inputs,outputs,outfile,calibration_percentage=0.75,append=0):
    dumps the patterns to the given outfile.
    outfile.cal which contains calibration% of the points
    outfile.ver which contains validation% = 100 - calibration% of the points
    append=1 appends to the previous .cal and .ver files if they exist
    '''
    bufsize = 15000
    #fast_write=0
    # Previously written pattern counts for [.cal, .ver], recovered from the
    # existing headers when appending.
    prev_no_patterns = [0, 0]
    if append:
        cfh = open(outfile + '.cal', 'r+', bufsize)
        vfh = open(outfile + '.ver', 'r+', bufsize)
        id = 0
        for fh in [cfh, vfh]:
            # Scan the header for the old pattern count, stop at the first
            # pattern marker, then rewind.
            while 1:
                line = string.strip(fh.readline())
                if string.find(line, 'No. of patterns') >= 0:
                    prev_no_patterns[id] = int(line[string.find(line, ':') + 1:])
                if string.find(line, 'Input pattern') >= 0:
                    break
            fh.seek(0)
            id = id + 1
    else:
        if fast_write:
            # Jython fast path: buffered Java streams instead of Python files.
            cfh = PrintStream(BufferedOutputStream(FileOutputStream(outfile + '.cal')))
            vfh = PrintStream(BufferedOutputStream(FileOutputStream(outfile + '.ver')))
        else:
            cfh = open(outfile + '.cal', 'w', bufsize)
            vfh = open(outfile + '.ver', 'w', bufsize)
    #
    #if append:
    #    print 'Prev # of patterns = %s'%prev_no_patterns
    npatterns = len(inputs[0]) - skip  # skip beginning
    #print 'Time Window of inputs[0] %s'%str(inputs[0].getTimeWindow())
    #print npatterns,len(inputs[0]),skip
    ncalibs = calibration_percentage * npatterns
    ninputs = len(inputs)
    noutputs = len(outputs)
    #print 'Ncalibs: %d & Nverse: %d'%(ncalibs,npatterns-ncalibs)
    # SNNS headers (line layout reconstructed — TODO confirm against an
    # existing .cal/.ver file).
    if append or not fast_write:
        cfh.write('''SNNS pattern definition file V3.2
generated at date : %s
No. of patterns : %d
No. of input units : %d
No. of output units : %d
''' % (time.ctime(time.time()), ncalibs + prev_no_patterns[0], ninputs, noutputs))
        vfh.write('''SNNS pattern definition file V3.2
generated at date : %s
No. of patterns : %d
No. of input units : %d
No. of output units : %d
''' % (time.ctime(time.time()), npatterns - ncalibs + prev_no_patterns[1], ninputs, noutputs))
    else:
        # Note: `print` as a Java PrintStream method name is valid Jython.
        cfh.print('''SNNS pattern definition file V3.2
generated at date : %s
No. of patterns : %d
No. of input units : %d
No. of output units : %d
''' % (time.ctime(time.time()), ncalibs + prev_no_patterns[0], ninputs, noutputs))
        vfh.print('''SNNS pattern definition file V3.2
generated at date : %s
No. of patterns : %d
No. of input units : %d
No. of output units : %d
''' % (time.ctime(time.time()), npatterns - ncalibs + prev_no_patterns[1], ninputs, noutputs))
    #
    #goto end of file
    if append:
        for fh in [cfh, vfh]:
            if fast_write:
                fh.close()
            else:
                fh.seek(0, 2)
        if fast_write:
            # Reopen in append mode for the Java streams.
            cfh = PrintStream(BufferedOutputStream(FileOutputStream(outfile + '.cal', 1)))
            vfh = PrintStream(BufferedOutputStream(FileOutputStream(outfile + '.ver', 1)))
    #
    i = 0
    all = inputs + outputs
    iterator = MultiIterator(all)
    npattern = 1
    nskip = 0
    nprint_intvl = npatterns / 10
    nprints = 0
    while not iterator.atEnd():
        # Progress report every 10%.
        if npattern % nprint_intvl == 0:
            nprints = nprints + 1
            print 'Done: %d%%' % (nprints * 10)
        # Skip the leading `skip` elements entirely.
        if nskip < skip:
            nskip = nskip + 1
            iterator.advance()
            continue
        if npattern > npatterns:
            break
        el = iterator.getElement()
        j = 0
        #print el
        # First `ncalibs` patterns go to .cal, the rest to .ver.
        if npattern <= ncalibs:
            fh = cfh
            pattern_no = npattern
            prev_pattern_no = prev_no_patterns[0]
        else:
            fh = vfh
            pattern_no = npattern - ncalibs
            prev_pattern_no = prev_no_patterns[1]
        if fast_write:
            fh.println()
            fh.println('# Input pattern %d:' % (pattern_no + prev_pattern_no))
        else:
            fh.write('\n# Input pattern %d:\n' % (pattern_no + prev_pattern_no))
        while j < ninputs:
            if fast_write:
                fh.print('%10.6f' % el.getY(j))
            else:
                fh.write('%10.6f' % el.getY(j))
            j = j + 1
        if fast_write:
            fh.println()
            fh.println('# Output pattern %d:' % (pattern_no + prev_pattern_no))
        else:
            fh.write('\n# Output pattern %d:\n' % (pattern_no + prev_pattern_no))
        while j < ninputs + noutputs:
            if fast_write:
                fh.print('%10.6f' % el.getY(j))
            else:
                fh.write('%10.6f' % el.getY(j))
            j = j + 1
        if fast_write:
            fh.println()
        else:
            fh.write('\n')
        npattern = npattern + 1
        iterator.advance()
    #
    cfh.close()
    vfh.close()
'''
libpomdp
========
File: gen_rs_7_8.py
Description: jython script to generate RockSample[7,8]
Copyright (c) 2009, 2010, 2011 Diego Maniloff
'''

import java.io.PrintStream as PrintStream
import rocksampleGen

# Destination SPUDD model file.
out = PrintStream("7-8/RockSample_7_8.SPUDD")
# Grid size and rock coordinates — presumably n x n grid with 8 rocks,
# matching the RockSample[7,8] name; verify against rocksampleGen.
n = 7
k = [[2, 0], [0, 1], [3, 1], [6, 3], [2, 4], [3, 4], [5, 5], [1, 6]]
# Initial agent position.
apos = [0, 3]
# Generate the model.
gen = rocksampleGen(n, k, apos, out)
copy("SOFTWARE/SPARTAN3_STARTERKIT/TEST_PROCESSOR_PROGRAMS/" + str(i) + "/object_code.oc.mif", "SOFTWARE/SPARTAN3_STARTERKIT/TEST_PROCESSOR_PROGRAMS/OBJECT_CODE.OC.MIF") sim = openSim() run(sim, "3020") assignmentsLog = sim.collectExecutedAssignments(str(i)) branchesLog = sim.collectExecutedBranches(str(i)) conditionsLog = sim.collectExecutedConditions(str(i)) ok = checkSignalValue("LEDS_LD", "11111111", sim) if (not ok): failedAssignments.append(assignmentsLog) failedBranches.append(branchesLog) failedConditions.append(conditionsLog) else: passedAssignments.append(assignmentsLog) passedBranches.append(branchesLog) passedConditions.append(conditionsLog) reportAssignments = Report.createReport(failedAssignments, passedAssignments, "Assignments") reportBranches = Report.createReport(failedBranches, passedBranches, "Branches") reportConditions = Report.createReport(failedConditions, passedConditions, "Conditions") reportAssignments.printStat(PrintStream(file("assignments.txt"))) reportAssignments.getSuspects().printStat(PrintStream(file("assignments_red.txt"))) reportBranches.printStat(PrintStream(file("branches.txt"))) reportBranches.getSuspects().printStat(PrintStream(file("branches_red.txt"))) reportConditions.printStat(PrintStream(file("conditions.txt"))) reportConditions.getSuspects().printStat(PrintStream(file("conditions_red.txt")))
''' libpomdp ======== File: genscript.py Description: jython script to generate a catch SPUDD file for a 5x5 rectangular grid catchw world with an omnicient wumpus and an agent with adjacent-cell sensing capabilities. ''' # imports import CatchGen import Wumpus import OmniWumpus import Sensor import AdjacentObs import CatchGridProperties import CatchRectangularGrid import java.io.PrintStream as PrintStream # declarations OL = 0.5 # omniscience level gp = CatchRectangularGrid(5, 5) w = OmniWumpus(gp, OL) s = AdjacentObs(gp) out = PrintStream("catch_rect_5_5.SPUDD") gen = CatchGen(5, 5, gp, w, s, out) # generate model gen.generate();
import os
import imp
import jpype
import jpype.imports
from jpype.types import *

# Start a JVM (once) with every jar under pdfextract/data on the classpath.
if not jpype.isJVMStarted():
    jars = []
    for top, dirs, files in os.walk(
            imp.find_module('pdfextract')[1] + '/data'):
        for nm in files:
            # endswith is clearer and safe for names shorter than 4 chars.
            if nm.endswith(".jar"):
                jars.append(os.path.join(top, nm))
    jpype.addClassPath(os.pathsep.join(jars))
    jpype.startJVM(jpype.getDefaultJVMPath(), convertStrings=False)

from java.lang import System
from java.io import PrintStream, File

# Silence Java stdout (os.devnull is portable across platforms).
System.setOut(PrintStream(File(os.devnull)))
def main():
    """Entry point: Bayesian-optimise the chosen PTSP algorithm's parameters,
    resuming from any JSON logs found in the output directory."""
    # Parse arguments
    import argparse
    parser = argparse.ArgumentParser(description='Runs a bayesian optimisation for some of the algorithms defined in the PTSP framework')
    parser.add_argument('--algorithm', choices=["QD-MCTS", "S-MCTS", "MS-MCTS", "VanillaGA", "VanillaMCTS"], help='The algorithm that should be optimized', default="S-MCTS")
    parser.add_argument('--outputDir', default="./optimizationResults", help='The output directory for all data generated by the optimization')
    parser.add_argument("--ptspPath", default="./ptsp.jar", help="The path to the .jar file containing the PTSP framework")
    parser.add_argument("--iters", default="10", type=int, help="Number of parameter-points to test by the bayesian optimization")
    args = parser.parse_args()
    args.outputPath = f"{args.outputDir}/{args.algorithm}"

    # Find all previous logs for this optimization
    logs = glob.glob(f"{args.outputPath}/optimizationLogs*.json")
    csvLogs = glob.glob(f"{args.outputPath}/*.csv")

    # Launch the JVM
    jpype.startJVM()
    jpype.addClassPath(args.ptspPath)
    import framework.Optimization as optim

    # Move java output into a file
    from java.lang import System
    from java.io import PrintStream, FileOutputStream
    pathlib.Path(args.outputPath).mkdir(parents=True, exist_ok=True)
    System.setOut(PrintStream(FileOutputStream(f"{args.outputPath}/cout.txt", True)))

    # Algorithm specific data: parameter bounds per algorithm.
    bounds = {
        "QD-MCTS": {
            "lowER": (0.01, 10),   # Exploration rate low-level search
            "highER": (0.01, 10),  # Exploration rate high-level search
            "steps": (300, 600),   # Number of steps for low-level search
            "rd": (10, 30)         # rolloutDepth
        },
        "S-MCTS": {
            "cellSize": (5, 30),   # Size of a cell in the subgoal grid (aka distance between subgoals)
            "er": (0.01, 10),      # Exploration rate high-level search
            "steps": (300, 600),   # Number of steps for low-level search
            "rd": (10, 30)         # rolloutDepth
        },
        "MS-MCTS": {
            "cellSize": (5, 30),   # Size of a cell in the subgoal grid (aka distance between subgoals)
            "er": (0.01, 10),      # Exploration rate high-level search
            "steps": (300, 600),   # Number of steps for low-level search
            "rd": (10, 30)         # rolloutDepth
        },
        "VanillaGA": {
            "gl": (10, 30),        # How many base-actions does a genome contain
            "ps": (1, 100),        # How many genomes in one population
            "mr": (0.1, 0.9),      # Probability that an action is mutated
        },
        "VanillaMCTS": {
            "er": (0.01, 10),      # Exploration rate
            "rd": (10, 30),        # RolloutDepth
        }
    }
    # Objective wrappers: integer-valued parameters are rounded before the call.
    funcs = {
        "QD-MCTS": lambda lowER, highER, steps, rd: execSafe(optim.runQD_MCTS, lowER, highER, round(steps), round(rd)),
        "S-MCTS": lambda cellSize, er, steps, rd: execSafe(optim.runSMCTS, cellSize, er, round(steps), round(rd)),
        "MS-MCTS": lambda cellSize, er, steps, rd: execSafe(optim.runMS_MCTS, cellSize, er, round(steps), round(rd)),
        "VanillaGA": lambda gl, ps, mr: execSafe(optim.runVanillaGA, round(gl), round(ps), mr),
        "VanillaMCTS": lambda er, rd: execSafe(optim.runVanillaMCTS, er, round(rd))
    }
    # Probe points for each algorithm, only one which I've used previously
    probes = {
        "QD-MCTS": {"lowER": math.sqrt(2), "highER": 4, "steps": 400, "rd": 25},
        "S-MCTS": {"cellSize": 20, "er": 4, "steps": 400, "rd": 25},
        "MS-MCTS": {"cellSize": 20, "er": 4, "steps": 400, "rd": 25},
        "VanillaGA": {"gl": 20, "ps": 50, "mr": 1. / 20},
        "VanillaMCTS": {"er": math.sqrt(2), "rd": 12}
    }

    # Initialize optimization
    optim.setupRun(len(logs) * 11)      # Different seed for each run
    optim.RUN_COUNTER = len(csvLogs)    # Make sure java logs into a new csv file
    optim.NUM_TRIALS = 10
    optim.OutputDir = args.outputPath
    optim.m_mapNames = glob.glob("./maps/**/*.map", recursive=True)
    optimizer = BayesianOptimization(
        f=funcs[args.algorithm],
        pbounds=bounds[args.algorithm],
        random_state=len(logs) * 11,    # Change behaviour for each run
    )
    print(f"Optimizing {args.algorithm} with bounds:")
    print(bounds[args.algorithm])

    # Probe if necessary
    init_points = 0
    if len(logs) == 0:
        print("Found no previous logs... Probing to improve results:")
        print(probes[args.algorithm])
        optimizer.probe(params=probes[args.algorithm], lazy=True)
        init_points = 5
    else:
        # If we found logs, load them
        print(f"Reading previous logs into optimizer...")
        load_logs(optimizer, logs=logs);
        for log in logs:
            print(f"Successfully loaded {log}")
    logger = JSONLogger(path=f"{args.outputPath}/optimizationLogs{len(logs) + 1}.json")
    optimizer.subscribe(Events.OPTIMIZATION_STEP, logger)

    # Run optimization
    print(f"Starting optimisation for {args.algorithm}...")
    optimizer.maximize(init_points=init_points, n_iter=args.iters)
    print("Finished optimisation")
    print(optimizer.max)
''' libpomdp ======== File: gen_rs_10_11.py Description: jython script to generate RockSample[10,11] Copyright (c) 2009, 2010, 2011 Diego Maniloff ''' # imports import sys sys.path.append('../../../../dist/libpomdp.jar') import java.io.PrintStream as PrintStream from libpomdp.problems.rocksample import rocksampleGen # declarations out = PrintStream("10-11/RockSample_10_11.SPUDD") n = 10 k = [[0, 3], [0, 7], [1, 8], [3, 3], [3, 8], [4, 3], [5, 8], [6, 1], [9, 3], [9, 9], [9, 0]] apos = [0, 5] # generate gen = rocksampleGen(n, k, apos, out)
model_context, aliases, wlst_mode=__wlst_mode) except DeployException, de: __release_edit_session_and_disconnect() raise de exit_code = 0 try: # First we enable the stdout again and then redirect the stdoout to a string output stream # call isRestartRequired to get the output, capture the string and then silence wlst output again # __wlst_helper.enable_stdout() sostream = StringOutputStream() System.setOut(PrintStream(sostream)) restart_required = __wlst_helper.is_restart_required() is_restartreq_output = sostream.get_string() __wlst_helper.silence() if model_context.is_rollback_if_restart_required( ) and restart_required: __wlst_helper.cancel_edit() __logger.severe('WLSDPLY_09015', is_restartreq_output) exit_code = CommandLineArgUtil.PROG_ROLLBACK_IF_RESTART_EXIT_CODE else: __wlst_helper.save() __wlst_helper.activate() if restart_required: exit_code = CommandLineArgUtil.PROG_RESTART_REQUIRED except BundleAwareException, ex: __release_edit_session_and_disconnect()
''' libpomdp ======== File: genscript.py Description: jython script to generate a catch SPUDD file for a 5x5 rectangular grid catchw world with a random wumpus and an agent with adjacent-cell sensing capabilities. ''' # imports import CatchGen import Wumpus import RandomWumpus import Sensor import AdjacentObs import CatchGridProperties import CatchRectangularGrid import java.io.PrintStream as PrintStream # declarations gp = CatchRectangularGrid(5, 5) w = RandomWumpus(gp) s = AdjacentObs(gp) out = PrintStream("catch_rect_5_5_rand_adjobs.SPUDD") gen = CatchGen(5, 5, gp, w, s, out) # generate model gen.generate()