def compute_trend(ldates): ''' request trend computation for the input list of dates ''' # accumulate all grids # notice that the reduce(map ...) allows for minimal memory usage # and for code parelellization tick = time.time() sumA = reduce( np.add, map(grd_to_trend_ls, map(ghcn.load, ldates), itertools.repeat(ldates[len(ldates) / 2], len(ldates)))) tock = time.time() - tick print ' Elapsed time: %f' % (tock, ) # solve the ls of equations print "Computing trend ..." trend, bias = solve_trend_ls(sumA, thrs=len(ldates) / 10.0) # now create corresponding grids aux = ghcn.load(ldates[0]) trend = grids.Grid(xlim=aux.xlim, ylim=aux.ylim, dx=aux.dx, dy=aux.dy, z=np.reshape(trend, (aux.nx, aux.ny)), zlabel='temperature anomaly trend', zunit='deg/yr', xlabel='longitude', xunit='deg', ylabel='latitude', yunit='deg') bias = grids.Grid(xlim=aux.xlim, ylim=aux.ylim, dx=aux.dx, dy=aux.dy, z=np.reshape(bias, (aux.nx, aux.ny))) if not os.path.isdir(ghcn.path('results')): os.makedirs(ghcn.path('results')) # save computed results trend.save(ghcn.path('trend', ldates)) bias.save(ghcn.path('bias', ldates)) return trend, bias
def pickle_grid(raw, rdate):
    """Create a Grid from data *raw* with date *rdate* and store it
    in the data folder."""
    dgrd = grids.Grid(xyz=raw, date=rdate,
                      xlim=defaults('xlim'), ylim=defaults('ylim'))
    # Persist under the per-day data file for this date.
    dgrd.save(path('daily file', rdate))
def build_castles(width, height):
    """Enumerate all castles that fit inside a width x height grid.

    :return: set of completed Grid objects for which is_castle() is True
    """
    # Seed with a grid whose bottom row is one full-width block.
    base = grids.Grid(width, height)
    base.maybe_add_block(blocks.Block(0, width), 0)
    candidates = set([base])

    # Grow the candidate set one row at a time: every partial grid is
    # expanded into all ways of filling the next row.
    for row_index in xrange(1, height):
        expanded = set()
        for partial in candidates:
            for filled in randomly_fill_row(partial, row_index, 0):
                expanded.add(filled)
        candidates = expanded

    # Keep only the candidates that qualify as castles.
    castles = set()
    for candidate in candidates:
        if candidate.is_castle() is True:
            castles.add(candidate)
    return castles
def test_randomly_fill_row_width_4(self):
    # Filling the first row of an empty 4x2 grid must produce
    # exactly 15 distinct grids.
    empty = grids.Grid(4, 2)
    filled = list(randomly_fill_row(empty, 0, 0))
    self.assertEqual(15, len(set(filled)))
def test_randomly_fill_row_width_3(self):
    # Filling the first row of an empty 3x2 grid must produce
    # exactly 7 grids.
    empty = grids.Grid(3, 2)
    filled = list(randomly_fill_row(empty, 0, 0))
    self.assertEqual(7, len(filled))
block = blocks.Block(col, length) grid_clone = grid.clone() grid_clone.maybe_add_block(block, row_index) for filled_grid in randomly_fill_row(grid_clone, row_index, block.start + block.length + 1): yield filled_grid else: yield grid_clone def build_castles(width, height): grid = grids.Grid(width, height) grid.maybe_add_block(blocks.Block(0, width), 0) candidates = set([grid]) for row_index in xrange(1, height): new_candidates = set() for grid in candidates: for filled_grid in randomly_fill_row(grid, row_index, 0): new_candidates.add(filled_grid) candidates = new_candidates castles = set() for candidate in candidates: if candidate.is_castle() is True: castles.add(candidate) return castles grid = grids.Grid(4, 3) assert grid.maybe_add_block(blocks.Block(0, 3), 0) grids.pretty_print(grid)
# Build the grid hierarchy and attach the subdomain layout for this rank.
allgrids = gr.define_grids(param)
nlevs = len(allgrids)
subdomains = subdom.set_subdomains(allgrids)
subdom.attach_subdomain_to_grids(allgrids, subdomains, myrank)
# subdom.print_subdomains(subdomains)

grids = []
for lev in range(nlevs):
    grd = allgrids[lev]
    subdomain = subdomains[grd["subdomain"]]
    subdomain['myrank'] = myrank
    grd['neighbours'] = subdomain['allneighbours'][myrank]
    grd['extension'] = 26
    grids.append(gr.Grid(grd, param))

# Transfer operators between the two finest levels.
lev = 0
fine = grids[lev]
coarse = grids[lev + 1]
I = set_interpolation_matrix(fine, coarse)
R = set_restriction_matrix(fine, coarse)

# Sanity-check interpolation: fill the coarse vector with a ramp and
# interpolate it onto the fine grid.
print('****** interpolation')
coarse.x[:] = np.arange(coarse.N)
fine.x[:] = I * coarse.x
# fine.halofill('x')
print(coarse.toarray('x'))
print(fine.toarray('x'))
def setUp(self):
    # Load the level definition, then build the grid and the player
    # object the tests exercise.
    level = file_gestion.file_reader("TheForest")
    self.grid = grids.Grid(level[0], level[1], level[2])
    self.player_object = objects.Objects(
        level[-1][0][0], level[-1][0][1], self.grid, 1)
def setUp(self):
    # Load the level definition and build the grid under test.
    level = file_gestion.file_reader("TheForest")
    self.grid = grids.Grid(level[0], level[1], level[2])