def main(arg):
    """Convert ``raw_*`` test files and solve the first test matching *arg*.

    Walks TEST_DIRECTORY, rewriting every ``raw_<name>.txt`` through
    ``parse_raw`` into a sibling ``<name>.txt``, then feeds the first map
    whose filename starts with *arg* to ``find_solution``.

    :param arg: filename prefix selecting which test map to solve.
    """
    import os.path

    # Collect (directory, filename) pairs for every .txt under the test tree.
    all_filenames = [
        (dirpath, fn)
        for dirpath, _dirnames, filenames in walk(TEST_DIRECTORY)
        for fn in filenames
        if fn.endswith('.txt')
    ]

    # Pre-process raw files: strip the 'raw_' prefix and write the parsed
    # content next to the original.  os.path.join (rather than d + f) is
    # required because os.walk dirpaths carry no trailing separator.
    for d, f in all_filenames:
        if f.startswith('raw_'):
            new_f = os.path.join(d, f[4:])
            with open(new_f, 'w') as nf:
                nf.write(parse_raw(os.path.join(d, f)))

    # Solve only the first test whose filename matches the requested prefix.
    for d, f in all_filenames:
        if not f.startswith(arg):
            continue
        test_map, num, ans = read_map(os.path.join(d, f))
        find_solution(test_map, num, ans)
        break
def game_loop():
    """Run the simulation loop until the window closes or Q is pressed.

    Keys A/S/D/F select progressively shorter frame delays, i.e. slower
    to faster playback.
    """
    field = read_map("map.txt")
    speed = 100
    initialize_render()
    # Keyboard presets for the per-frame delay (milliseconds).
    speed_presets = {
        blt.TK_A: 1000,
        blt.TK_S: 500,
        blt.TK_D: 100,
        blt.TK_F: 10,
    }
    while True:
        if blt.has_input():
            key = blt.read()
            if key in (blt.TK_CLOSE, blt.TK_Q):
                break
            speed = speed_presets.get(key, speed)
        update(field)
        render()
        blt.delay(speed)
    terminate_render()
# Land-use class parameters: total class count, then the number of passive
# and feature classes; the remainder are the active classes.
luc = len(luc_names)
pas = 1
fea = 6
act = luc - (pas + fea)

# Specify the maximum neighbourhood size distance considered
max_distance = 5
# Set the minimum required rate of conversions (presumably a fraction,
# i.e. 0.025 = 2.5% — confirm against the analysis code).
min_convo_rate = 0.025
# Set the minimum required Enrichment Factor value at distance 1.
min_EF_1 = 0.00
# Set the significance limit (recommended value is 1.96).
z_limit = 1.96

# Read in the map for time slice 1.
omap = read_map(omap_path)
# Read in the map for time slice 2.
amap = read_map(amap_path)
# Read in the masking map.
mask = read_map(mask_path)

# Analyse the input maps for evaluation purposes.
map_dimensions = np.shape(omap)
rows = map_dimensions[0]
cols = map_dimensions[1]

# Determine the distances that will be analysed using the module
# considered_distances.
temp = considered_distances(max_distance)
# Store the list of considered distances as a variable.
cd = temp[0]
# Store the total number of distances considered
def define_surface_co2_region(x, y, xedge, yedge, do_debug=False):
    """
    Define surface CO2 state-variable regions following the TransCom
    definition; see
    http://www.purdue.edu/transcom/images/smoothmap2_final_2.jpg

    Parameters
    ----------
    x, y : grid centres, used only for the debug plot of the regridded map.
    xedge, yedge : target grid edges; the 1x1-degree (360x180) TransCom
        region map is regridded onto this grid.
    do_debug : when True, plot the original and regridded region maps.

    Returns
    -------
    new_map : region-id map on the (xedge, yedge) grid.
    land_reg : names of the 12 land regions (index 0 is 'ice').
    ocean_reg : names of the 11 ocean regions.
    def_err : per-region prior flux errors, in Gt C / year.
    """
    import read_map
    # 1x1-degree global region-type map; ids index land_reg then ocean_reg.
    lon, lat, type_map = read_map.read_map(360, 180)
    land_reg = ['ice',
                'North American Boreal',
                'North American Temperate',
                'South American Tropical',
                'South American Temperate',
                'North Africa',
                'South Africa',
                'Eurasia Boreal',
                'Eurasia Temperate',
                'Tropical Asia',
                'Australia',
                'Europe']
    # NOTE(review): 'Northen' typo preserved — these labels may be matched
    # by exact string elsewhere; verify before correcting.
    ocean_reg = ['North Pacific Temperate',
                 'West Pacific Tropics',
                 'East Pacific Tropics',
                 'South Pacific Temperate',
                 'Northern Ocean',
                 'Northen Atlantic Temperate',
                 'Atlantic Tropics',
                 'South Atlantic Temperate',
                 'South Ocean',
                 'Indian Tropical',
                 'South Indian Temperate']
    type_map = type_map.astype(int)
    nz = max(type_map.flat)          # highest region id present
    type_map_sav = array(type_map)   # copy that gets regridded below
    # Model-grid latitude mids; computed but not used in this routine —
    # presumably kept for parity with related routines. TODO confirm
    # before removing.
    JJPAR = gm.grid_mod.get_y_size()
    JY = arange(1, JJPAR+1)
    YY = gm.grid_mod.get_ymids(JY)
    # Regrid the 1x1-degree region map onto the requested grid and trim
    # to nx x ny cells (edges define one more point than cells).
    new_map = read_map.regrid_map(lon, lat, xedge, yedge, type_map_sav)
    nx = size(xedge)-1
    ny = size(yedge)-1
    new_map = new_map[0:nx, 0:ny]
    if (do_debug):
        subplot(2, 1, 1)
        nz = max(type_map.flat)
        type_map = 1.0*type_map
        gpl.plot_map(type_map, lon, lat, use_pcolor=1)
        subplot(2, 1, 2)
        gpl.plot_map(new_map, x, y, use_pcolor=1)
        show()
    # The emission errors are taken from the TransCom prior flux (file 3)
    # basis-function errors; the 'ice' entry is assigned a small value.
    def_err = [0.15,
               0.726200, 1.500000, 1.412200,
               1.227300, 1.331700, 1.411400,
               1.514300, 1.728300, 0.865600,
               0.593600, 1.420900, 0.820000,
               0.500000, 0.560000, 1.220000,
               0.260000, 0.400000, 0.400000,
               0.480000, 1.500000, 0.740000,
               0.540000]  # in Gt C /year
    return new_map, land_reg, ocean_reg, def_err
def define_surface_co2_region_cell(x, y, xedge, yedge,
                                   sel_id, nlon_reg, nlat_reg,
                                   do_debug=False):
    """
    Define surface CO2 regions following the TransCom definition, with the
    selected region *sel_id* split into one sub-region per grid cell.

    See http://www.purdue.edu/transcom/images/smoothmap2_final_2.jpg

    Parameters
    ----------
    x, y : grid centres, used only for the debug plot.
    xedge, yedge : target grid edges; the 1x1-degree TransCom map is
        regridded onto this grid before splitting.
    sel_id : id of the (land) region to be split cell-by-cell.
    nlon_reg, nlat_reg : accepted for interface parity with the *_hr
        variant; not used here (the split is per cell, not per tile).
    do_debug : when True, plot the original and per-cell region maps.

    Returns
    -------
    new_map : region-id map with sel_id expanded into per-cell ids.
    all_land_reg : land region names including the new SUB_RxxSxx entries.
    ocean_reg : names of the 11 ocean regions.
    def_err : per-region prior flux errors, in Gt C / year (the split
        region's error is divided by sqrt(number of cells)).
    """
    import read_map
    lon, lat, type_map = read_map.read_map(360, 180)
    land_reg = ['ice',
                'North American Boreal',
                'North American Temperate',
                'South American Tropical',
                'South American Temperate',
                'North Africa',
                'South Africa',
                'Eurasia Boreal',
                'Eurasia Temperate',
                'Tropical Asia',
                'Australia',
                'Europe']
    # NOTE(review): 'Northen' typo preserved — labels may be matched by
    # exact string elsewhere.
    ocean_reg = ['North Pacific Temperate',
                 'West Pacific Tropics',
                 'East Pacific Tropics',
                 'South Pacific Temperate',
                 'Northern Ocean',
                 'Northen Atlantic Temperate',
                 'Atlantic Tropics',
                 'South Atlantic Temperate',
                 'South Ocean',
                 'Indian Tropical',
                 'South Indian Temperate']
    type_map = type_map.astype(int)
    nz = max(type_map.flat)          # highest region id present
    type_map_sav = array(type_map)
    # Model-grid latitude mids; computed but unused — kept for parity with
    # the sibling routines. TODO confirm before removing.
    JJPAR = gm.grid_mod.get_y_size()
    JY = arange(1, JJPAR+1)
    YY = gm.grid_mod.get_ymids(JY)
    # Regrid the 1x1-degree region map onto the requested grid and trim
    # to nx x ny cells.
    new_map = read_map.regrid_map(lon, lat, xedge, yedge, type_map)
    nx = size(xedge)-1
    ny = size(yedge)-1
    new_map = new_map[0:nx, 0:ny]
    print(max(new_map.flat))
    type_map = array(new_map)
    lon = array(xedge[0:nx])
    # BUGFIX: latitudes were taken from xedge ('lat = array(xedge[0:ny])'),
    # which mislabels (or breaks, on non-square grids) the debug plot.
    lat = array(yedge[0:ny])
    # Assign each cell of the selected region a sequential sub-region
    # offset 0..cnt_cell-1 (the first cell keeps sel_id itself).
    new_map = zeros(shape(type_map))
    idx = where(type_map == sel_id)
    idx = squeeze(idx)
    cnt_cell = 0
    idx_lon = idx[0]
    idx_lat = idx[1]
    npt = size(idx_lon)
    for pt in range(npt):
        new_map[idx_lon[pt], idx_lat[pt]] = cnt_cell
        cnt_cell = cnt_cell+1
    print('cnt_cell', cnt_cell)
    # Shift every id above sel_id to make room for the new sub-regions,
    # then add the per-cell offsets.
    type_map = where(type_map > sel_id, type_map+cnt_cell-1, type_map)
    type_map = type_map+new_map
    if (do_debug):
        subplot(2, 1, 1)
        nz = max(type_map.flat)
        type_map = 1.0*type_map
        gpl.plot_map(type_map, lon, lat, use_pcolor=1)
        subplot(2, 1, 2)
        gpl.plot_map(new_map, x, y, use_pcolor=1)
        show()
    # The emission errors are taken from the TransCom prior flux (file 3)
    # basis-function errors; the 'ice' entry is assigned a small value.
    def_err_land = [0.15,
                    0.726200, 1.500000, 1.412200,
                    1.227300, 1.331700, 1.411400,
                    1.514300, 1.728300, 0.865600,
                    0.593600, 1.420900]
    # def_err_ocean=[0.820000, \
    #                0.500000, 0.560000, 1.220000, \
    #                0.260000, 0.400000, 0.400000, \
    #                0.480000, 1.500000, 0.740000, \
    #                0.540000]  # in Gt C /year
    def_err_ocean = [0.270000,
                     0.390000, 0.370000, 0.630000,
                     0.350000, 0.270000, 0.410000,
                     0.550000, 0.720000, 0.480000,
                     0.410000]  # in Gt C /year
    # Spread the split region's error across its cells.
    def_err_land[sel_id] = def_err_land[sel_id]/(sqrt(cnt_cell))
    all_def_err_land = def_err_land[0:sel_id+1] \
        + [def_err_land[sel_id]]*(cnt_cell-1) \
        + def_err_land[sel_id+1:]
    # Name the new sub-regions SUB_RxxSyy.
    new_reg = list()
    for ireg in range(1, cnt_cell):
        regname = 'SUB_R%2.2dS%2.2d' % (sel_id, ireg)
        new_reg.append(regname)
    print(new_reg)
    all_land_reg = land_reg[0:sel_id+1]+new_reg+land_reg[sel_id+1:]
    print(len(all_land_reg))
    def_err = all_def_err_land+def_err_ocean
    # subplot(2,1,1)
    # gpl.plot_map(type_map,x, y, use_pcolor=1,cmap=cm.jet)
    # subplot(2,1,2)
    # gpl.plot_map(new_map, x, y, use_pcolor=1,cmap=cm.jet)
    # show()
    new_map = array(type_map)
    return new_map, all_land_reg, ocean_reg, def_err
def define_surface_co2_region_hr(x, y, xedge, yedge,
                                 sel_id, nlon_reg, nlat_reg,
                                 do_debug=False):
    """
    Define surface CO2 regions following the TransCom definition, with the
    selected region *sel_id* split into roughly equal-area tiles:
    nlat_reg latitude bands, each divided into nlon_reg longitude pieces.

    See http://www.purdue.edu/transcom/images/smoothmap2_final_2.jpg

    Parameters
    ----------
    x, y : grid centres, used only for the debug plot.
    xedge, yedge : target grid edges the final map is regridded onto.
    sel_id : id of the (land) region to be subdivided.
    nlon_reg, nlat_reg : number of sub-regions along longitude / latitude.
    do_debug : when True, plot the original and regridded region maps.

    Returns
    -------
    new_map : region-id map on the (xedge, yedge) grid.
    all_land_reg : land region names including the new SUB_RxxSxx entries.
    ocean_reg : names of the 11 ocean regions.
    def_err : per-region prior flux errors, in Gt C / year.
    """
    import read_map
    # 1x1-degree global region-type map; ids index land_reg + ocean_reg.
    lon, lat, type_map = read_map.read_map(360, 180)
    land_reg = ['ice',
                'North American Boreal',
                'North American Temperate',
                'South American Tropical',
                'South American Temperate',
                'North Africa',
                'South Africa',
                'Eurasia Boreal',
                'Eurasia Temperate',
                'Tropical Asia',
                'Australia',
                'Europe']
    # NOTE(review): 'Northen' typo preserved — labels may be matched by
    # exact string elsewhere.
    ocean_reg = ['North Pacific Temperate',
                 'West Pacific Tropics',
                 'East Pacific Tropics',
                 'South Pacific Temperate',
                 'Northern Ocean',
                 'Northen Atlantic Temperate',
                 'Atlantic Tropics',
                 'South Atlantic Temperate',
                 'South Ocean',
                 'Indian Tropical',
                 'South Indian Temperate']
    type_map = type_map.astype(int)
    nz = max(type_map.flat)          # highest region id present
    type_map_sav = array(type_map)
    # Model-grid latitude mids; computed but unused here — kept for parity
    # with the sibling routines. TODO confirm before removing.
    JJPAR = gm.grid_mod.get_y_size()
    JY = arange(1, JJPAR+1)
    YY = gm.grid_mod.get_ymids(JY)
    # Cells belonging to the region being split, and its bounding box.
    idx = where(type_map == sel_id)
    lon_reg_id = idx[0]
    lat_reg_id = idx[1]
    rlon_reg = lon[lon_reg_id]
    rlat_reg = lat[lat_reg_id]
    min_lon_reg, max_lon_reg = min(rlon_reg), max(rlon_reg)
    min_lat_reg, max_lat_reg = min(rlat_reg), max(rlat_reg)
    id_lon = where(logical_and(lon >= min_lon_reg, lon <= max_lon_reg))
    id_lon = squeeze(id_lon)
    id_lat = where(logical_and(lat >= min_lat_reg, lat <= max_lat_reg))
    id_lat = squeeze(id_lat)
    # Per latitude row: indices of the region's cells and the row's
    # cos(lat)-weighted area (zero rows keep the list aligned with id_lat).
    sel_id_list = list()
    areas = list()
    new_map = zeros(shape(type_map))
    for ilat in id_lat:
        sel_idx = where(lat_reg_id == ilat)
        sel_idx = squeeze(sel_idx)
        if (size(sel_idx) > 0):
            ix, iy = lon_reg_id[sel_idx], lat_reg_id[sel_idx]
            area_y = abs(cos(pi*lat[ilat]/180.0))*size(sel_idx)
            areas.append(area_y)
        else:
            areas.append(0)
        sel_id_list.append(array(sel_idx))
    areas = array(areas)
    # Target area per latitude band.
    area_div = sum(areas)/nlat_reg
    area_sum = 0.0
    nlat_div = size(id_lat)
    area_lon = zeros(size(id_lon), float)
    # print shape(area_lon)
    # print id_lon
    # print lon[id_lon]
    # print id_lat
    # print lat[id_lat]
    sub_sel_idx = list()
    region_count = 0
    print('nlat_div', nlat_div, area_div)
    new_reg = list()
    # Accumulate rows until a band reaches area_div, then split that band
    # into nlon_reg longitude pieces of roughly equal accumulated area.
    for ilat in range(nlat_div):
        area_sum = area_sum+areas[ilat]
        # Spread this row's cell areas into the per-longitude bins.
        sel_idx = sel_id_list[ilat]
        if (size(sel_idx) > 0):
            tmp_lon_pos = lon_reg_id[sel_idx]
            # print id_lon[40:50]
            # print tmp_lon_pos
            lon_pos = searchsorted(id_lon, tmp_lon_pos)
            lon_pos = squeeze(lon_pos)
            # print lon_pos
            # print id_lon[lon_pos]
            cell_area = abs(cos(pi*lat[lat_reg_id[sel_idx[0]]]/180.0))
            for ii in range(size(sel_idx)):
                area_lon[lon_pos[ii]] = area_lon[lon_pos[ii]]+cell_area
                sub_sel_idx.append(sel_idx[ii])
        # Band complete (or last row): cut it along longitude.
        if (area_sum >= area_div or ilat == nlat_div-1):
            sub_sel_idx = array(sub_sel_idx)
            # Equal-area thresholds along the cumulative longitude profile.
            area_lon_div = sum(area_lon)/nlon_reg
            area_lon = add.accumulate(area_lon)
            lon_div_ar = arange(0, nlon_reg, 1.0)
            lon_div_ar = area_lon_div*lon_div_ar
            lon_pos_2 = searchsorted(lon_div_ar, area_lon)
            lon_pos_2 = lon_pos_2-1
            ix, iy = lon_reg_id[sub_sel_idx], lat_reg_id[sub_sel_idx]
            print(shape(id_lon))
            # print shape(ix), shape(iy)
            tmp_ix_pos = searchsorted(id_lon, ix)
            # print 'max lon_pos_2', max(lon_pos_2)
            # print lon_div_ar
            # print area_lon[tmp_ix_pos]
            # print lon_pos_2[tmp_ix_pos]
            print('nz', nz)
            # Sub-region offset = longitude piece + band * nlon_reg.
            new_reg_id = lon_pos_2[tmp_ix_pos]+(region_count)*nlon_reg
            print(min(new_reg_id), max(new_reg_id), region_count)
            # new_reg_id=where(new_reg_id==0, 0, new_reg_id+nz)
            new_map[ix, iy] = new_reg_id  # -type_map[ix,iy]
            region_count = region_count+1
            # Reset accumulators for the next band.
            area_sum = 0.0
            area_lon = zeros(size(id_lon), float)
            sub_sel_idx = list()
    # Shift ids above sel_id to make room, then add the tile offsets.
    type_map = where(type_map > sel_id, type_map+nlon_reg*nlat_reg-1,
                     type_map)
    type_map = type_map+new_map
    # Regrid the expanded region map onto the requested grid.
    new_map = read_map.regrid_map(lon, lat, xedge, yedge, type_map)
    nx = size(xedge)-1
    ny = size(yedge)-1
    new_map = new_map[0:nx, 0:ny]
    print(max(new_map.flat))
    # figure(2)
    # gpl.plot_map(type_map,lon, lat, use_pcolor=1, cmap=cm.Paired) #Set1)
    # savefig('test_div.png')
    # savefig('test_div.ps')
    # show()
    if (do_debug):
        subplot(2, 1, 1)
        nz = max(type_map.flat)
        type_map = 1.0*type_map
        gpl.plot_map(type_map, lon, lat, use_pcolor=1)
        subplot(2, 1, 2)
        gpl.plot_map(new_map, x, y, use_pcolor=1)
        show()
    # The emission errors are taken from the TransCom prior flux (file 3)
    # basis-function errors; the 'ice' entry is assigned a small value.
    def_err_land = [0.15,
                    0.726200, 1.500000, 1.412200,
                    1.227300, 1.331700, 1.411400,
                    1.514300, 1.728300, 0.865600,
                    0.593600, 1.420900]
    # def_err_ocean=[0.820000, \
    #                0.500000, 0.560000, 1.220000, \
    #                0.260000, 0.400000, 0.400000, \
    #                0.480000, 1.500000, 0.740000, \
    #                0.540000]  # in Gt C /year
    def_err_ocean = [0.270000,
                     0.390000, 0.370000, 0.630000,
                     0.350000, 0.270000, 0.410000,
                     0.550000, 0.720000, 0.480000,
                     0.410000]  # in Gt C /year
    # Spread the split region's error across its nlon_reg*nlat_reg tiles.
    def_err_land[sel_id] = def_err_land[sel_id]/(sqrt(nlon_reg)*sqrt(nlat_reg))
    all_def_err_land = def_err_land[0:sel_id+1] \
        + [def_err_land[sel_id]]*(nlon_reg*nlat_reg-1) \
        + def_err_land[sel_id+1:]
    # Name the new sub-regions SUB_RxxSyy.
    for ireg in range(1, nlon_reg*nlat_reg):
        regname = 'SUB_R%2.2dS%2.2d' % (sel_id, ireg)
        new_reg.append(regname)
    print(new_reg)
    all_land_reg = land_reg[0:sel_id+1]+new_reg+land_reg[sel_id+1:]
    def_err = all_def_err_land+def_err_ocean
    return new_map, all_land_reg, ocean_reg, def_err
geo_cmd = "C:\\Program Files (x86)\\Geonamica\\Metronamica\\GeonamicaCmd.exe" # Set the land-use class names. luc_names = [ "Natural areas", "Arable land", "Permanent crops", "Pastures", "Agricultural areas", "Residential", "Industry & commerce", "Recreation areas", "Forest", "Road & rail", "Seaports", "Airports", "Mine & dump sites", "Fresh water", "Marine water" ] # Set the land-use class parameters: number of land-use classes, passive, # feature, and active. luc = len(luc_names) pas = 1 fea = 6 act = luc - (pas + fea) # Read in the map for the data at time slice 1. omap = read_map(map1_path) # Read in the map for the data at time slice 2. amap = read_map(map2_path) # Read in the masking map. mask = read_map(mask_path) # Analyse the input maps for evaluation purposes map_dimensions = np.shape(omap) rows = map_dimensions[0] cols = map_dimensions[1] # Specify the maximum neighbourhood size distance considered max_distance = 5 # Determine the distances that will be analysed, use module: considered_distances. temp = considered_distances(max_distance) # Store the list of considered distances as a variable. cd = temp[0] # Store the total number of distances considered
geo_cmd = "C:\\Program Files (x86)\\Geonamica\\Metronamica\\GeonamicaCmd.exe" # Set the land-use class names. luc_names = [ "Natural areas", "Arable land", "Permanent crops", "Pastures", "Agricultural areas", "Residential", "Industry & commerce", "Recreation areas", "Forest", "Road & rail", "Seaports", "Airports", "Mine & dump sites", "Fresh water", "Marine water" ] # Set the land-use class parameters: number of land-use classes, passive, # feature and active. luc = len(luc_names) pas = 1 fea = 6 act = luc - (pas + fea) # Read in the map for the data at time slice 1. map_1990 = read_map(map1_path) # Read in the map for the data at time slice 2. map_2000 = read_map(map2_path) # Read in the map for the data at time slice 3. map_2006 = read_map(map3_path) # Read in the masking map mask = read_map(mask_path) # Analyse the input maps for evaluation purposes. map_dimensions = np.shape(map_1990) rows = map_dimensions[0] cols = map_dimensions[1] # Specify the maximum neighoburhood size distance considered. max_distance = 5 # Determine the distances that will be analysed. temp = considered_distances(max_distance) # Store the list of considered distances as a variable.
# Input paths ---------------------------------------------------------------
#mask_path=str(sys.argv[3])
#map1_path = "/home/a1091793/Documents/Modelling/RunMetroCal/individual_2/replicate_0/Data2/lu1989.asc"
#map2_path = "/home/a1091793/Documents/Modelling/RunMetroCal/individual_2/replicate_0/Data2/lu2000.asc"
#mask_path = "/home/a1091793/Documents/Modelling/RunMetroCal/individual_2/replicate_0/Data2/region.asc"
map2_path = r"/home/a1091793/Documents/Modelling/Run/individual_0/replicate_0/Data2/lu2000.asc"
map1_path = r"/home/a1091793/Documents/Modelling/Run/individual_0/replicate_0/Log/Land_use/Land use map_2000-Jan-01 00_00_00.rst"
mask_path = r"/home/a1091793/Documents/Modelling/Run/individual_0/replicate_0/Data2/region.asc"

from read_map import read_map
import numpy as np
from clumpy_module import clumpiness_index

# Maps ----------------------------------------------------------------------
map1 = read_map(map1_path)
map2 = read_map(map2_path)
mask = read_map(mask_path)

# Clumpiness indices --------------------------------------------------------
CLUMPY1 = clumpiness_index(map1, mask)
CLUMPY2 = clumpiness_index(map2, mask)

# Average absolute clumpiness error -----------------------------------------
# Class layout: [passive | active | feature]; only active classes scored.
luc = np.amax(map1) + 1
#pas=int(sys.argv[4])
#fea=int(sys.argv[5])
pas = 1
fea = 2
act = luc - (fea + pas)
# Absolute clumpiness difference for each active class.
ACE = [abs(CLUMPY1[i] - CLUMPY2[i]) for i in range(pas, luc - fea)]