def check_map(wait_sec, local, require_cluster=False):
    """Check that grid_map matches a serial map over a fixed input grid.

    Each job argument is a ``(value, wait_sec)`` tuple; results from the
    grid run must equal a plain ``map`` with ``compute_factorial``.
    """
    job_args = [(n, wait_sec) for n in (1, 2, 4, 8, 16)]
    serial_results = list(map(compute_factorial, job_args))
    grid_results = grid_map(compute_factorial, job_args,
                            quiet=False, local=local,
                            require_cluster=require_cluster)
    eq_(serial_results, grid_results)
def main():
    """Distribute STGE computation over an SGE cluster.

    Parses the experimental-condition command line, creates the output
    root directory, saves the condition grid as JSON, runs ``stge_main``
    for every condition through ``grid_map``, and writes the collected
    accuracy metrics to ``<root>/accuracy.csv``.
    """
    # parse commandline
    parser = argparse.ArgumentParser(
        description='Distributing STGE computation to SGE clusters')
    parser.add_argument("--genenum", default=[100], type=int, nargs="*",
                        help="Number of gene")
    parser.add_argument("--amplitude", default=[300], type=float, nargs="*",
                        help="Amplitude of gene expression")
    parser.add_argument("--width", default=[200], type=float, nargs="*",
                        help="Broadness of gene expression")
    parser.add_argument("--tgain", default=[1.0], type=float, nargs="*",
                        help="Time change speed")
    parser.add_argument("--lcorr", default=[200], type=float, nargs="*",
                        help="Length of spatial correlation")
    parser.add_argument("--tcorr", default=[5], type=float, nargs="*",
                        help="Length of time correlation")
    parser.add_argument("--scnum", default=[1000], type=int, nargs="*",
                        help="Number of observed single cell expression "
                             "at one timepoint")
    parser.add_argument("--refnum", default=[1000], type=int, nargs="*",
                        help="Number of cells in simulation")
    # NOTE(review): the original source had stray ';;;;' tokens inside this
    # call (a syntax error); they were removed.
    parser.add_argument("--vbiter", default=[10], type=int, nargs="*",
                        help="Iteration of variational bayes")
    parser.add_argument("--optiter", default=[10], type=int, nargs="*",
                        help="Iteration of parameter optimization")
    parser.add_argument("--memreq", default="2G", type=str, nargs="*",
                        help="Memory requirement for each job")
    parser.add_argument("--root", type=str,
                        help="Path to root directory")
    parser.add_argument("--queue", type=str, default="mjobs.q",
                        help="Queue type of jobs")
    args = parser.parse_args()

    cond_manager = condition_manager()
    cond_keys = ["genenum", "amplitude", "width", "tgain", "lcorr",
                 "tcorr", "scnum", "refnum", "vbiter", "optiter"]
    cond_manager.load_cmdline(args, keys=cond_keys)
    # make root directory (raises if it exists, so a previous experiment
    # is never silently overwritten)
    os.mkdir(args.root)
    # save experimental condition
    params_file_path = "/".join([args.root, "params.json"])
    cond_manager.save_as_json(params_file_path)
    # convert args to condition list
    base_dict = {"root": args.root}
    cond_manager.make_cond_dict_list(base_dict)
    # apply stge_main to condition_list; the job name is the last path
    # component of root
    job_name = args.root.split("/")[-1]
    if len(job_name) == 0:
        # the case the end of root is /
        job_name = args.root.split("/")[-2]
    temp_dir = "/".join([args.root, "tmp/"])
    print(job_name)
    condition_list = grid_map(
        stge_main, cond_manager.cond_dict_list,
        mem_free=args.memreq, name=job_name,
        temp_dir=temp_dir, queue=args.queue)
    # collect per-condition accuracy rows into one CSV
    accuracy_df_list = [dict2r1df(condition) for condition in condition_list]
    accuracy_df = pd.concat(accuracy_df_list)
    file_path = args.root + "/accuracy.csv"
    accuracy_df.to_csv(file_path)
def main():
    """Run a minimal map/reduce example on the grid."""
    inputs = [1, 2, 4, 8, 16]
    # Fan the factorial computations out as grid jobs.
    partial_results = grid_map(computeFactorial, inputs, quiet=False)
    print("reducing result")
    # "Reduce" step: report each result next to its input.
    for arg, value in zip(inputs, partial_results):
        print("f({0}) = {1}".format(arg, value))
def par(iterable_values):
    """Evaluate the module-level ``function`` over ``iterable_values``.

    Runs everything through ``grid_map`` with one slot per job and as
    many local processes as there are inputs; returns the result list.
    """
    job_options = dict(
        quiet=False,
        num_slots=1,
        temp_dir=u"./tmp",
        max_processes=len(iterable_values),
        queue="all.q",
        require_cluster=False,
        local=False,
        cleanup=True,
    )
    return grid_map(function, iterable_values, **job_options)
def main():
    """Run the factorial map example with INFO-level logging enabled."""
    logging.captureWarnings(True)
    log_format = ('%(asctime)s - %(name)s - %(levelname)s - '
                  '%(message)s')
    logging.basicConfig(format=log_format, level=logging.INFO)

    inputs = [3, 5, 10, 20]
    # The default queue used by grid_map is all.q.  You must specify
    # the `queue` keyword argument if that is not the name of your queue.
    partial_results = grid_map(computeFactorial, inputs, quiet=False,
                               max_processes=4, queue='all.q')
    # Just print the items instead of really reducing. We could always
    # sum them.
    print("reducing result")
    for arg, value in zip(inputs, partial_results):
        print("f({0}) = {1}".format(arg, value))
def run_tournament_cluster(to_play: List[List[str]]):
    """Submit every game in ``to_play`` to the SGE queue and store results.

    Each game is its own cluster job; results are appended to disk as
    they come back from the queue.
    """
    from gridmap import grid_map
    import connectn.results as results
    from connectn.utils import TEMP_DIR

    logger = logging.getLogger(__name__)
    if not TEMP_DIR.exists():
        TEMP_DIR.mkdir(parents=True)
    # Timestamped scratch directory, unique per tournament submission.
    job_temp_dir = TEMP_DIR / time.strftime("%Y-%m-%d-%Hh%Mm%Ss")
    job_temp_dir.mkdir()

    logger.info(f"Submitting games to the queue: {to_play}")
    n_games = len(to_play)
    n_done = 1
    job_results = grid_map(
        run_single_game,
        to_play,
        mem_free="2G",
        name="conn4match",
        num_slots=1,
        temp_dir=f"{job_temp_dir!s}",
        queue="cognition-all.q",
        add_env={"CREATE_PLOTS": "FALSE", "USE_MEM_FREE": "TRUE"},
        require_cluster=True,
    )
    for game_result in job_results:
        logging.info(f"Received result {n_done} of {n_games}")
        results.add_game(game_result)
        logging.info(f"Wrote result {n_done} of {n_games} to disk.")
        n_done += 1
    logging.info(f"Finished all {n_games} games.")
def check_map_partial(local):
    """grid_map over ``add_two`` must match the plain list computation."""
    values = [1, 2, 4, 6, 8, 16]
    want = [v + 2 for v in values]
    got = grid_map(add_two, values, quiet=False, local=local)
    eq_(want, got)
def loop(self, screen):
    """Main game loop: play each stage in order until the window closes.

    NOTE(review): the original source was collapsed onto one line; the
    block nesting below (in particular the success/failure branch) was
    reconstructed from the token stream — confirm against the upstream
    repository.
    """
    for stage_idx in range(len(self.stages)):
        print('lvlstart')
        self.GRID_MAP = gridmap.grid_map(ARESTA, NSQUARES)
        clock = pygame.time.Clock()
        last_clicked_grid_X = 7
        last_clicked_grid_Y = 1
        LEVEL = self.stages[stage_idx]
        missao = pygame.image.load(LEVEL.text)
        rob = None
        pygame.mixer.music.play(-1, 0.0)
        while True:
            delta_t = clock.tick(FRAME_RATE)
            # --- handle input events ---
            for event in pygame.event.get():
                if event.type == pygame.QUIT:
                    # closing the window ends the whole game loop
                    return
                elif event.type == pygame.MOUSEBUTTONDOWN:
                    # translate pixel position into grid cell
                    pos = pygame.mouse.get_pos()
                    column = pos[1] // (ARESTA + MARGIN)
                    row = pos[0] // (ARESTA + MARGIN)
                    if (column < 15 and row < 15):
                        last_clicked_grid_X = row
                        last_clicked_grid_Y = column
                        self.GRID_MAP.change_selection(column, row)
                elif event.type == pygame.KEYDOWN:
                    if (event.key in DIRECTION_CONTROLLER):
                        # place a tile of the current builder at the
                        # selected cell
                        play_sound(EQUIP)
                        grid_maker = self.builderTable[self.builder]
                        self.GRID_MAP.grid[last_clicked_grid_X][
                            last_clicked_grid_Y] = grid_maker.makegrid(
                                event.key)
                    elif event.key == Q_KEY:
                        # start the robot
                        T = 0
                        play_sound(START)
                        rob = LEVEL.create_bot()
                    elif (event.key in BUILDER_CONTROLLER):
                        # switch the active tile builder
                        play_sound(CHANGE)
                        self.builder = self.builderSelector[event.key]
                    elif (event.key in MOVEMENT_CONTROLLER):
                        # move the selection cursor with the keyboard
                        movement = MOVEMENT_CONTROLLER[event.key]
                        last_clicked_grid_X += movement[0]
                        last_clicked_grid_Y += movement[1]
                        self.GRID_MAP.change_selection(
                            last_clicked_grid_Y, last_clicked_grid_X)
            # --- render game screen ---
            screen.fill((255, 255, 255))  # white background
            self.GRID_MAP.drawmap(screen)
            if (rob != None):
                # draw the robot, then advance it according to the grid
                # cell it currently stands on
                rob.draw(screen)
                (X, Y) = rob.get_coords()
                x = X // (ARESTA + MARGIN)
                y = Y // (ARESTA + MARGIN)
                actual_grid = self.GRID_MAP.grid[x][y]
                rob.move()
                rob.change_speed(actual_grid)
            if (rob != None and rob.get_speed() == (0, 0)):
                # robot stopped: either it reached a valid goal or failed
                if (rob.validate()):
                    print('SUCCESS')
                    play_sound(SUCCESS)
                    rob = LEVEL.create_bot()
                    if (rob == None):
                        # no more bots for this level: stage cleared
                        play_sound(END)
                        pygame.time.delay(4000)
                        break
                else:
                    print('FAILURE')
                    play_sound(ERROR)
                    rob = None
                    LEVEL.reset()
            screen.blit(missao, (600, 0))
            pygame.display.update()
def grid_jug(
    jugfile,
    jugdir=None,
    jug_args=None,
    jug_nworkers=4,
    name='gridjug',
    keep_going=False,
    verbose=False,
    capture_jug_stdout=False,
    **kwargs
):
    """Run Jug with GridMap on a Grid Engine cluster.

    GridMap spawns several ``jug execute`` workers as Grid Engine tasks,
    giving programmatic (reproducible) execution of Jug processes plus
    GridMap's monitoring/reporting layer; Jug keeps doing the actual work.

    Parameters
    ----------
    jugfile : path
        Path to the jugfile.
    jugdir : path
        Where to save intermediate results.
    jug_args : list
        Extra jug command-line arguments (``'execute'`` is already
        included); roughly ``'jug execute {jugfile} ' + ' '.join(jug_args)``.
    jug_nworkers : int, optional
        Number of Grid Engine tasks, i.e. how many times ``jug execute``
        runs.
    name : str, optional
        Base name of the Grid Engine task.
    keep_going : bool, optional
        Strongly recommended!  Defaults to ``False``: if a single Jug
        task fails, GridMap will cancel all jobs!  If ``True``, Jug does
        not raise but keeps retrying the task.
    verbose : bool, optional
        If ``True``, Jug logs ``INFO`` events.
    capture_jug_stdout : bool, optional
        Defaults to ``False``.  If ``True``, captures Jug's task summary
        printed to stdout.
    **kwargs : keyword-dict, optional
        Additional options passed through to :any:`gridmap.grid_map`.

    See Also
    --------
    :any:`gridmap.grid_map` : The map function
    `Jug subcommands <http://jug.readthedocs.org/en/latest/subcommands.html>`_
    """
    import gridmap

    # Assemble the jug command line: 'jug execute <jugfile> [options...]'.
    jug_argv = ['jug', 'execute', '{}'.format(jugfile)]
    if jugdir is not None:
        jug_argv.append('--jugdir={}'.format(jugdir))
    if keep_going:
        jug_argv.append('--keep-going')
    if verbose:
        jug_argv.append('--verbose=INFO')
    if jug_args is not None:
        jug_argv.extend(jug_args)

    # gridmap expands each inner [capture_jug_stdout, jug_argv] item into
    # the two positional arguments of _jug_main (jug_argv itself must stay
    # a list — jug.main takes a single argv list).  Repeating the wrapped
    # job spec jug_nworkers times yields one identical job per worker.
    # https://github.com/pygridtools/gridmap/blob/master/gridmap/job.py#L225
    # https://github.com/pygridtools/gridmap/blob/master/gridmap/job.py#L929
    args_list = jug_nworkers * [[capture_jug_stdout, jug_argv]]

    return gridmap.grid_map(
        f=_jug_main,
        args_list=args_list,
        name=name,
        **kwargs
    )