def trees(s, b):
    '''Generate the binary tree objects for the experimental points.

    s : (tmin, tmax) for symmetric tree
    b : (tmin, tmax) for broken tree
    return : symmetric, broken
    '''
    def _apply_splits(tree, paths):
        # Each path is a string of 'l'/'r' steps from the root to the node
        # that gets split; '' is the root itself.  Splits are applied in
        # list order, matching the original explicit call sequence.
        for path in paths:
            node = tree.root
            for step in path:
                node = node.left if step == 'l' else node.right
            node.split()

    symmetric = bintree(*s)
    _apply_splits(symmetric,
                  ["", "l", "r", "ll", "lr", "rl", "rr",
                   "lll", "rrr", "rrrr"])

    broken = bintree(*b)
    _apply_splits(broken,
                  ["", "l", "r", "ll", "lr", "lll", "llr",
                   "llll", "llrl", "llrll", "lllll"])

    return symmetric, broken
def _gen_tree_impl(p_tree, p_config):
    """
    Internal implementation of dungeon tree generation.

    Recursively splits the room stored in ``p_tree.val`` until either the
    configured maximum depth is reached or ``_split_room`` reports that the
    room cannot be split further.

    p_tree   : bintree node whose ``val`` holds the room to subdivide
    p_config : generation parameters (must provide ``max_depth``; also
               consumed by ``_split_room``)
    return   : p_tree — the same node, with children attached where
               splitting succeeded
    """
    # Abort if too low in the tree...
    if p_tree.level >= p_config.max_depth:
        return p_tree
    # ...or splitting is not possible.
    new_rooms = _split_room(p_tree.val, p_config)
    if not new_rooms:
        return p_tree
    first_room, second_room = new_rooms
    # Set new room attributes on fresh child nodes.
    left = bintree()
    right = bintree()
    left.val = first_room
    right.val = second_room
    p_tree.set_left(left)
    p_tree.set_right(right)
    # Recurse for new child rooms.
    _gen_tree_impl(p_tree.left, p_config)
    _gen_tree_impl(p_tree.right, p_config)
    # BUGFIX: the original fell through here and implicitly returned None,
    # while both early-exit paths return p_tree.  Return it consistently so
    # callers can rely on the documented return value.
    return p_tree
def _main(self): parser = optparse.OptionParser(usage=usage) parser.add_option('-v', '--verbose', action='count', help='Verbose output. Specify multiple times for more verbosity.' ) parser.add_option('--blocks', type='int', action='store', default=1048576, metavar=`1048576`, help='The size of the file in blocks.' ) parser.add_option('--num-writers', type='int', action='store', default=50, metavar=`50`, help='The number of writer threads.' ) parser.add_option('--num-readers', type='int', action='store', default=50, metavar=`50`, help='The number of reader threads.' ) parser.add_option('--max-num-blocks', type='int', action='store', default=100*1024, metavar=`100*1024`, help='The maximum number of blocks to write.' ) parser.add_option('--block-size', type='int', action='store', default=1, metavar=`1`, help='The size of a block.' ) parser.add_option('--duration', type='int', action='store', default=60, metavar=`60`, help='How long to run the test in seconds.' ) parser.add_option('--reader-delay', type='int', action='store', default=10000, metavar=`10000`, help='How many writes to wait to finish before starting the readers.' ) parser.add_option('--greediness', type='int', action='store', default=10, metavar=`10`, help='Number of consecutive reads or writes to perform before yielding.' ) parser.add_option('--cancel-percent', type='int', action='store', default=10, metavar=`10`, help='The percent of operations to try to cancel.' ) parser.add_option('--lio', action='store_true', help='Use LIO instead of AIO.' ) parser.add_option('--min-lio', type='int', action='store', default=1, metavar=`1`, help='The minimum number of events per LIO submit.' ) parser.add_option('--max-lio', type='int', action='store', default=MAX_LIO, metavar=`MAX_LIO`, help='The maximum number of events per LIO submit.' ) parser.add_option('--max-lio-size', type='int', action='store', default=MAX_LIO_SIZE, metavar=`MAX_LIO_SIZE`, help='The maximum size of a block of data in a LIO request. 
Do not change unless you know what you are doing. This is used instead of --max-num-blocks when using LIO.' ) parser.add_option('--num-lio-workers', type='int', action='store', default=50, metavar=`50`, help='The number of workers to use for LIO (used instead of --num-readers and --num-writers).' ) # blocked # interrupt percentage self.options, arguments = parser.parse_args() if len(arguments) != 1: parser.error('Must specify 1 argument.') # Check for valid settings. if self.options.lio: if not USING_LISTIO: parser.error('Unable to use LIO. Either lio_listio is not compiled, or the sysctl p1003_1b.aio_listio_max is not set.') if self.options.max_lio > MAX_LIO: parser.error('Maximum number of LIO events cannot be set above the p1003_1b.aio_listio_max sysctl value (currently %i).' % (MAX_LIO,)) if self.options.min_lio > self.options.max_lio: parser.error('--min-lio cannot be set above --max-lio') if self.options.max_lio_size % self.options.block_size: parser.error('--max-lio-size is not a multiple of --block-size') else: if self.options.max_num_blocks > self.options.blocks/2: parser.error('max_num_blocks cannot be greater than the file size divided by 2.') self._size = self.options.blocks * self.options.block_size self._write_locks = bintree.bintree((0, self._size)) self._read_locks = bintree.bintree((0, self._size)) self.path = arguments[0] self.main_thread_state = 'Creating file.' self.create() self.main_thread_state = 'Preparing file.' 
self.prep() if self.options.lio: self.start_lio() else: self.start_aio() print 'Write cancel success: %i' % (self._write_cancel_success,) print 'Write cancel failure: %i' % (self._write_cancel_fail,) print 'Read cancel success: %i' % (self._read_cancel_success,) print 'Read cancel failure: %i' % (self._read_cancel_fail,) print 'Total writes: %s (%s bytes)' % (comma_group(self._writes_finished), comma_group(self._bytes_written)) print 'Total reads: %s (%s bytes)' % (comma_group(self._reads_finished), comma_group(self._bytes_read)) print 'Assertion errors: %i' % (self._assertion_errors,)
def __init__(self, p_dims):
    """Create a dungeon generator whose tree root holds one room of the
    given dimensions."""
    # The whole dungeon starts out as a single undivided room at the
    # root of the binary space-partition tree.
    initial_room = room()
    initial_room.dims = p_dims
    tree = bintree()
    tree.val = initial_room
    self.dungeon = tree
    # Corridors are carved later during generation; none exist yet.
    self.corridors = []
import ipyparallel as ipp # connect client and create views rc = ipp.Client() rc.block = True ids = rc.ids root_id = ids[0] root = rc[root_id] view = rc[:] from bintree import bintree, print_bintree # generate binary tree of parents btree = bintree(ids) print("setting up binary tree interconnect:") print_bintree(btree) view.run('bintree.py') view.scatter('id', ids, flatten=True) view['root_id'] = root_id # create the Communicator objects on the engines view.execute('com = BinaryTreeCommunicator(id, root = id==root_id )') pub_url = root.apply_sync(lambda: com.pub_url) # noqa: F821 # gather the connection information into a dict ar = view.apply_async(lambda: com.info) # noqa: F821 peers = ar.get_dict()
if __name__ == '__main__':
    ### SYMMETRIC ###
    # Model parameters for the symmetric configuration.
    J = 1.0
    gamma = 0.95
    # One period of the dynamics; fall back to an arbitrary window when
    # J == gamma makes the frequency vanish.
    if J != gamma:
        period = math.pi / math.sqrt(abs(J ** 2 - gamma ** 2))
    else:
        period = 10

    # Theory curve: dense, evenly spaced sampling over one period.
    with open("../data/symmetric/theory.dat", "w") as fout:
        fout.write("# t J gamma norm A B C D AB AC AD BC BD CD\n")
        for t in numpy.linspace(0, period, 200):
            U, norm = ptmatrix(gamma, J, t)
            addline(gamma, J, t, U, norm, fout)

    # Set up binary tree describing experimental data.  Each entry below is
    # a left/right path from the root to the node that gets split, applied
    # in the same order as the original explicit call chain.
    tree = bintree(0., period)
    for path in ("", "l", "r", "ll", "rr", "lll", "rrr", "llll", "rrrr"):
        node = tree.root
        for step in path:
            node = node.left if step == 'l' else node.right
        node.split()

    # Simulated data at the tree's sample points.
    with open("../data/symmetric/simulated.dat", "w") as fout:
        fout.write("# t J gamma norm A B C D AB AC AD BC BD CD\n")
        for n, t in enumerate(tree.points()):
            U, norm = ptmatrix(gamma, J, t)
            addline(gamma, J, t, U, norm, fout)