def __init__(self, region, tilex, tiley):
    """Create a tile based on the region and the tile's coordinates."""
    # NB: smart people check that files have been gotten.
    # today we assume that's already been done.

    # snag stuff from the region first
    self.name = region.name
    self.size = region.tilesize
    self.mapname = region.mapname
    self.tilex = int(tilex)
    self.tiley = int(tiley)
    self.tiles = region.tiles
    self.doOre = region.doOre
    self.doSchematics = region.doSchematics

    if (self.tilex < self.tiles['xmin']) or (self.tilex >= self.tiles['xmax']):
        raise AttributeError("tilex (%d) must be between %d and %d" %
                             (self.tilex, self.tiles['xmin'], self.tiles['xmax']))
    if (self.tiley < self.tiles['ymin']) or (self.tiley >= self.tiles['ymax']):
        raise AttributeError("tiley (%d) must be between %d and %d" %
                             (self.tiley, self.tiles['ymin'], self.tiles['ymax']))

    # create the tile directory if necessary
    self.tiledir = os.path.join(region.regiondir, 'Tiles', '%dx%d' % (self.tilex, self.tiley))
    self.skip = False
    if os.path.isfile(os.path.join(self.tiledir, 'Tile.yaml')):
        self.skip = True
    else:
        cleanmkdir(self.tiledir)
def __init__(self, region, tilex, tiley):
    """Create a tile based on the region and the tile's coordinates."""
    # NB: smart people check that files have been gotten.
    # today we assume that's already been done.

    # snag stuff from the region first
    self.name = region.name
    self.size = region.tilesize
    self.mapfile = region.mapfile
    self.tilex = int(tilex)
    self.tiley = int(tiley)
    self.tiles = region.tiles
    self.doOre = region.doOre
    self.doSchematics = region.doSchematics

    if (self.tilex < self.tiles['xmin']) or (self.tilex >= self.tiles['xmax']):
        raise AttributeError('tilex (%d) must be between %d and %d' %
                             (self.tilex, self.tiles['xmin'], self.tiles['xmax']))
    if (self.tiley < self.tiles['ymin']) or (self.tiley >= self.tiles['ymax']):
        raise AttributeError('tiley (%d) must be between %d and %d' %
                             (self.tiley, self.tiles['ymin'], self.tiles['ymax']))

    # create the tile directory if necessary
    self.tiledir = os.path.join(region.regiondir, 'Tiles', '%dx%d' % (self.tilex, self.tiley))
    cleanmkdir(self.tiledir)
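# Example (hedged): a minimal sketch of how a Tile might be constructed from a
# previously saved region. It assumes the region was dumped to Region.yaml as in
# Region.__init__ below; the 'Regions/BlockIsland' path and the Tile class name
# are illustrative only and may differ in the real project layout.
import os
import yaml

yamlfile = file(os.path.join('Regions', 'BlockIsland', 'Region.yaml'))
myRegion = yaml.load(yamlfile)
yamlfile.close()

# build one tile per coordinate pair inside the region's tile extents
for tilex in xrange(myRegion.tiles['xmin'], myRegion.tiles['xmax']):
    for tiley in xrange(myRegion.tiles['ymin'], myRegion.tiles['ymax']):
        tile = Tile(myRegion, tilex, tiley)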
def main(): """The main routine.""" parser = argparse.ArgumentParser( description='Builds c10t maps for regions.') parser.add_argument('--name', required=True, type=str, help='name of region to be mapped') parser.add_argument('--gmaps', action='store_true', help='generate Google Maps') args = parser.parse_args() print "Building %smaps for %s..." % ('Google ' if args.gmaps else '', args.name) (centerx, centerz) = center(args.name) if args.gmaps: cleanmkdir(os.path.join('maps', args.name)) command = 'C10T=../c10t/build/c10t ../c10t/scripts/google-api/google-api.sh -w worlds/%s -o maps/%s -O "-M 2048 -z --center %d,%d"' % ( args.name, args.name, centerx, centerz) else: command = '../c10t/build/c10t -M 2048 -z -w worlds/%s -o maps/%s.png --center %d,%d' % ( args.name, args.name, centerx, centerz) os.system(command)
def buildvrts(self): """Extracts image files and merges as necessary.""" layerIDs = [ name for name in os.listdir(self.mapsdir) if os.path.isdir(os.path.join(self.mapsdir, name)) ] if layerIDs == []: raise IOError, 'No files found' for layerID in layerIDs: (pType, iType, mType, cType) = self.decodeLayerID(layerID) filesuffix = cType.lower() layerdir = os.path.join(self.mapsdir, layerID) compfiles = [ name for name in os.listdir(layerdir) if (os.path.isfile(os.path.join(layerdir, name)) and name.endswith(filesuffix)) ] for compfile in compfiles: (compbase, compext) = os.path.splitext(compfile) fullfile = os.path.join(layerdir, compfile) datasubdir = os.path.join(layerdir, compbase) compfile = '%s.%s' % (compbase, iType) # tar (at least) expects Unix pathnames compimage = '/'.join([compbase, compfile]) cleanmkdir(datasubdir) if (Region.zipfileBroken == False): if (cType == "tgz"): cFile = tarfile.open(fullfile) elif (cType == "zip"): cFile = zipfile.ZipFile(fullfile) cFile.extract(compimage, layerdir) cFile.close() else: if (cType == "tgz"): cFile = tarfile.open(fullfile) cFile.extract(compimage, layerdir) elif (cType == "zip"): omfgcompimage = os.path.join(compbase, compfile) os.mkdir(os.path.dirname(os.path.join(datasubdir, compimage))) cFile = zipfile.ZipFile(fullfile) cFile.extract(omfgcompimage, datasubdir) os.rename(os.path.join(datasubdir, omfgcompimage), os.path.join(layerdir, compimage)) cFile.close() # convert tif to good SRS rawfile = os.path.join(layerdir, compbase, compfile) goodfile = os.path.join(layerdir, compbase, "%s.good%s" % (compbase, iType)) warpcmd = 'gdalwarp -q -multi -t_srs "%s" %s %s' % (Region.t_srs, rawfile, goodfile) os.system('%s' % warpcmd) vrtfile = os.path.join(layerdir, '%s.vrt' % layerID) buildvrtcmd = 'gdalbuildvrt %s %s' % (vrtfile, ' '.join(['"%s"' % x for x in locate('*.good*', root=layerdir)])) os.system('%s' % buildvrtcmd)
def main(): """The main routine.""" parser = argparse.ArgumentParser(description='Builds c10t maps for regions.') parser.add_argument('--name', required=True, type=str, help='name of region to be mapped') parser.add_argument('--gmaps', action='store_true', help='generate Google Maps') args = parser.parse_args() print "Building %smaps for %s..." % ('Google ' if args.gmaps else '', args.name) (centerx, centerz) = center(args.name) if args.gmaps: cleanmkdir(os.path.join('maps', args.name)) command = 'C10T=../c10t/build/c10t ../c10t/scripts/google-api/google-api.sh -w worlds/%s -o maps/%s -O "-M 2048 -z --center %d,%d"' % (args.name, args.name, centerx, centerz) else: command = '../c10t/build/c10t -M 2048 -z -w worlds/%s -o maps/%s.png --center %d,%d' % (args.name, args.name, centerx, centerz) os.system(command)
def __init__(self, name, xmax, xmin, ymax, ymin, tilesize=None, scale=None, vscale=None,
             trim=None, sealevel=None, maxdepth=None, oiIDs=None, lcIDs=None, elIDs=None,
             doOre=True, doSchematics=False):
    """Create a region based on lat-longs and other parameters."""
    # NB: smart people check names
    self.name = name

    # Zone computation for UTM
    lat_center = (ymax + ymin) / 2.
    lon_center = (xmax + xmin) / 2.
    zone_num = utmll.latlon_to_zone_number(lat_center, lon_center)
    self.utm = utmll.latlon_to_zone_identifier(lat_center, lon_center)
    self.t_srs = self.t_srs % zone_num

    # tile must be an even multiple of chunk width
    # chunkWidth not defined in pymclevel but is hardcoded everywhere
    if tilesize is None:
        tilesize = Region.tilesize
    else:
        if tilesize % 16 != 0:
            raise AttributeError('bad tilesize %s' % tilesize)
    self.tilesize = tilesize

    # scale can be any positive integer
    if scale is None:
        scale = Region.scale
    else:
        if scale > 0:
            self.scale = int(scale)
        else:
            raise AttributeError('bad scale %s' % scale)

    # sealevel and maxdepth are not checked until after files are retrieved
    if sealevel is None:
        sealevel = Region.sealevel
    else:
        self.sealevel = sealevel

    if maxdepth is None:
        maxdepth = Region.maxdepth
    else:
        self.maxdepth = maxdepth

    # trim and vscale are not checked until after files are retrieved
    if trim is None:
        trim = Region.trim
    else:
        self.trim = trim

    if vscale is None:
        vscale = Region.vscale
    else:
        self.vscale = vscale

    # disable overly dense elevation products
    self.productIDs = Region.productIDs
    # NB: ND9 no longer supported
    # if (scale > 5):
    #     self.productIDs['elevation'].remove('ND9')
    if (scale > 15):
        self.productIDs['elevation'].remove('N3F')

    # specified IDs must be in region list
    if oiIDs is None:
        orthoIDs = self.productIDs['ortho']
    else:
        orthoIDs = [ID for ID in oiIDs if ID in self.productIDs['ortho']]
        if orthoIDs == []:
            raise AttributeError('invalid ortho ID')

    if lcIDs is None:
        landcoverIDs = self.productIDs['landcover']
    else:
        landcoverIDs = [ID for ID in lcIDs if ID in self.productIDs['landcover']]
        if landcoverIDs == []:
            raise AttributeError('invalid landcover ID')

    if elIDs is None:
        elevationIDs = self.productIDs['elevation']
    else:
        elevationIDs = [ID for ID in elIDs if ID in self.productIDs['elevation']]
        if elevationIDs == []:
            raise AttributeError('invalid elevation ID')

    # enable or disable ore and schematics
    self.doOre = doOre
    self.doSchematics = doSchematics

    # crazy directory fun
    self.regiondir = os.path.join(Region.regiontop, self.name)
    cleanmkdir(self.regiondir)

    self.mapsdir = os.path.join(self.regiondir, 'Datasets')
    cleanmkdir(self.mapsdir)

    self.mapname = os.path.join(self.regiondir, 'Map.tif')

    # these are the latlong values
    self.llextents = {'xmax': max(xmax, xmin), 'xmin': min(xmax, xmin),
                      'ymax': max(ymax, ymin), 'ymin': min(ymax, ymin)}

    # access the web service
    # NB: raise hell if it is inaccessible
    wsdlConv = "http://extract.cr.usgs.gov/XMLWebServices/Coordinate_Conversion_Service.asmx?WSDL"
    clientConv = suds.client.Client(wsdlConv)
    # This web service returns suds.sax.text.Text not XML sigh
    Convre = "<X Coordinate>(.*?)</X Coordinate > <Y Coordinate>(.*?)</Y Coordinate >"

    # convert from WGS84 to UTM
    ULdict = {'X_Value': self.llextents['xmin'], 'Y_Value': self.llextents['ymin'],
              'Current_Coordinate_System': Region.wgs84, 'Target_Coordinate_System': Region.utm}
    (ULx, ULy) = re.findall(Convre, clientConv.service.getCoordinates(**ULdict))[0]
    URdict = {'X_Value': self.llextents['xmax'], 'Y_Value': self.llextents['ymin'],
              'Current_Coordinate_System': Region.wgs84, 'Target_Coordinate_System': Region.utm}
    (URx, URy) = re.findall(Convre, clientConv.service.getCoordinates(**URdict))[0]
    LLdict = {'X_Value': self.llextents['xmin'], 'Y_Value': self.llextents['ymax'],
              'Current_Coordinate_System': Region.wgs84, 'Target_Coordinate_System': Region.utm}
    (LLx, LLy) = re.findall(Convre, clientConv.service.getCoordinates(**LLdict))[0]
    LRdict = {'X_Value': self.llextents['xmax'], 'Y_Value': self.llextents['ymax'],
              'Current_Coordinate_System': Region.wgs84, 'Target_Coordinate_System': Region.utm}
    (LRx, LRy) = re.findall(Convre, clientConv.service.getCoordinates(**LRdict))[0]

    # select maximum values for landcover extents
    xfloat = [float(x) for x in [ULx, URx, LLx, LRx]]
    yfloat = [float(y) for y in [ULy, URy, LLy, LRy]]
    mxmax = max(xfloat)
    mxmin = min(xfloat)
    mymax = max(yfloat)
    mymin = min(yfloat)

    # calculate tile edges
    realsize = self.scale * self.tilesize
    self.tiles = {'xmax': int(ceil(mxmax / realsize)), 'xmin': int(floor(mxmin / realsize)),
                  'ymax': int(ceil(mymax / realsize)), 'ymin': int(floor(mymin / realsize))}

    self.utmextents = {'ortho': dict(), 'landcover': dict(), 'elevation': dict()}
    self.wgs84extents = {'ortho': dict(), 'landcover': dict(), 'elevation': dict()}

    # landcover has a maxdepth-sized border
    self.utmextents['elevation'] = {'xmax': self.tiles['xmax'] * realsize,
                                    'xmin': self.tiles['xmin'] * realsize,
                                    'ymax': self.tiles['ymax'] * realsize,
                                    'ymin': self.tiles['ymin'] * realsize}
    borderwidth = self.maxdepth * self.scale
    self.utmextents['landcover'] = {'xmax': self.utmextents['elevation']['xmax'] + borderwidth,
                                    'xmin': self.utmextents['elevation']['xmin'] - borderwidth,
                                    'ymax': self.utmextents['elevation']['ymax'] + borderwidth,
                                    'ymin': self.utmextents['elevation']['ymin'] - borderwidth}
    self.utmextents['ortho'] = {'xmax': self.tiles['xmax'] * realsize,
                                'xmin': self.tiles['xmin'] * realsize,
                                'ymax': self.tiles['ymax'] * realsize,
                                'ymin': self.tiles['ymin'] * realsize}

    # now convert back from UTM to WGS84
    for maptype in ['ortho', 'landcover', 'elevation']:
        ULdict = {'X_Value': self.utmextents[maptype]['xmin'], 'Y_Value': self.utmextents[maptype]['ymin'],
                  'Current_Coordinate_System': Region.utm, 'Target_Coordinate_System': Region.wgs84}
        (ULx, ULy) = re.findall(Convre, clientConv.service.getCoordinates(**ULdict))[0]
        URdict = {'X_Value': self.utmextents[maptype]['xmax'], 'Y_Value': self.utmextents[maptype]['ymin'],
                  'Current_Coordinate_System': Region.utm, 'Target_Coordinate_System': Region.wgs84}
        (URx, URy) = re.findall(Convre, clientConv.service.getCoordinates(**URdict))[0]
        LLdict = {'X_Value': self.utmextents[maptype]['xmin'], 'Y_Value': self.utmextents[maptype]['ymax'],
                  'Current_Coordinate_System': Region.utm, 'Target_Coordinate_System': Region.wgs84}
        (LLx, LLy) = re.findall(Convre, clientConv.service.getCoordinates(**LLdict))[0]
        LRdict = {'X_Value': self.utmextents[maptype]['xmax'], 'Y_Value': self.utmextents[maptype]['ymax'],
                  'Current_Coordinate_System': Region.utm, 'Target_Coordinate_System': Region.wgs84}
        (LRx, LRy) = re.findall(Convre, clientConv.service.getCoordinates(**LRdict))[0]

        # select maximum values
        xfloat = [float(x) for x in [ULx, URx, LLx, LRx]]
        yfloat = [float(y) for y in [ULy, URy, LLy, LRy]]
        self.wgs84extents[maptype] = {'xmax': max(xfloat), 'xmin': min(xfloat),
                                      'ymax': max(yfloat), 'ymin': min(yfloat)}

    # check availability of product IDs and identify specific layer IDs
    self.oilayer = self.checkavail(orthoIDs, 'ortho')
    self.lclayer = self.checkavail(landcoverIDs, 'landcover')
    self.ellayer = self.checkavail(elevationIDs, 'elevation')

    # write the values to the file
    stream = file(os.path.join(self.regiondir, 'Region.yaml'), 'w')
    yaml.dump(self, stream)
    stream.close()
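# Example (hedged): how the Convre regular expression above is assumed to pull
# a coordinate pair out of the text returned by the conversion web service.
# The sample response string below is fabricated to match the pattern; the
# real service output may differ in detail.
import re

Convre = "<X Coordinate>(.*?)</X Coordinate > <Y Coordinate>(.*?)</Y Coordinate >"
sample = "<X Coordinate>1897332.49</X Coordinate > <Y Coordinate>2251259.11</Y Coordinate >"
(x, y) = re.findall(Convre, sample)[0]
print "x = %s, y = %s" % (x, y)  # both come back as strings and are floated later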
def __init__(self, name, xmax, xmin, ymax, ymin, tilesize=None, scale=None, vscale=None,
             trim=None, sealevel=None, maxdepth=None, lcIDs=None, elIDs=None,
             doOre=True, doSchematics=False):
    """Create a region based on lat-longs and other parameters."""
    # NB: smart people check names
    self.name = name

    # tile must be an even multiple of chunk width
    # chunkWidth not defined in pymclevel but is hardcoded everywhere
    if tilesize is None:
        tilesize = Region.tilesize
    else:
        if tilesize % 16 != 0:
            raise AttributeError('bad tilesize %s' % tilesize)
    self.tilesize = tilesize

    # scale can be any positive integer
    if scale is None:
        scale = Region.scale
    else:
        if scale > 0:
            self.scale = int(scale)
        else:
            raise AttributeError('bad scale %s' % scale)

    # sealevel and maxdepth are not checked until after files are retrieved
    if sealevel is None:
        sealevel = Region.sealevel
    else:
        self.sealevel = sealevel

    if maxdepth is None:
        maxdepth = Region.maxdepth
    else:
        self.maxdepth = maxdepth

    # trim and vscale are not checked until after files are retrieved
    if trim is None:
        trim = Region.trim
    else:
        self.trim = trim

    if vscale is None:
        vscale = Region.vscale
    else:
        self.vscale = vscale

    # disable overly dense elevation products
    self.productIDs = Region.productIDs
    # NB: ND9 no longer supported
    # if scale > 5:
    #     self.productIDs['elevation'].remove('ND9')
    if scale > 15:
        self.productIDs['elevation'].remove('N3F')

    # specified IDs must be in region list
    if lcIDs is None:
        landcoverIDs = self.productIDs['landcover']
    else:
        landcoverIDs = [ID for ID in lcIDs if ID in self.productIDs['landcover']]
        if landcoverIDs == []:
            raise AttributeError('invalid landcover ID')

    if elIDs is None:
        elevationIDs = self.productIDs['elevation']
    else:
        elevationIDs = [ID for ID in elIDs if ID in self.productIDs['elevation']]
        if elevationIDs == []:
            raise AttributeError('invalid elevation ID')

    # enable or disable ore and schematics
    self.doOre = doOre
    self.doSchematics = doSchematics

    # crazy directory fun
    cleanmkdir(self.regiondir)
    cleanmkdir(self.mapsdir)

    # these are the latlong values
    self.llextents = {'xmax': max(xmax, xmin), 'xmin': min(xmax, xmin),
                      'ymax': max(ymax, ymin), 'ymin': min(ymax, ymin)}

    # Convert from WGS84 to Albers.
    [mxmax, mxmin, mymax, mymin] = Region.get_corners(Region.wgs84, Region.albers,
                                                      xmax, xmin, ymax, ymin)

    # calculate tile edges
    realsize = self.scale * self.tilesize
    self.tiles = {'xmax': int(ceil(mxmax / realsize)), 'xmin': int(floor(mxmin / realsize)),
                  'ymax': int(ceil(mymax / realsize)), 'ymin': int(floor(mymin / realsize))}

    self.albersextents = {'landcover': dict(), 'elevation': dict()}
    self.wgs84extents = {'landcover': dict(), 'elevation': dict()}

    # Landcover needs a maxdepth-sized border for bathy calculations.
    self.albersextents['elevation'] = {'xmax': self.tiles['xmax'] * realsize,
                                       'xmin': self.tiles['xmin'] * realsize,
                                       'ymax': self.tiles['ymax'] * realsize,
                                       'ymin': self.tiles['ymin'] * realsize}
    borderwidth = self.maxdepth * self.scale
    self.albersextents['landcover'] = {'xmax': self.albersextents['elevation']['xmax'] + borderwidth,
                                       'xmin': self.albersextents['elevation']['xmin'] - borderwidth,
                                       'ymax': self.albersextents['elevation']['ymax'] + borderwidth,
                                       'ymin': self.albersextents['elevation']['ymin'] - borderwidth}

    # Now convert back from Albers to WGS84.
    for maptype in ['landcover', 'elevation']:
        [wxmax, wxmin, wymax, wymin] = Region.get_corners(Region.albers, Region.wgs84,
                                                          self.albersextents[maptype]['xmax'],
                                                          self.albersextents[maptype]['xmin'],
                                                          self.albersextents[maptype]['ymax'],
                                                          self.albersextents[maptype]['ymin'])
        self.wgs84extents[maptype] = {'xmax': wxmax, 'xmin': wxmin, 'ymax': wymax, 'ymin': wymin}

    # check availability of product IDs and identify specific layer IDs
    self.lclayer = self.check_availability(landcoverIDs, 'landcover')
    self.ellayer = self.check_availability(elevationIDs, 'elevation')

    # write the values to the file
    stream = file(self.regionfile, 'w')
    yaml.dump(self, stream)
    stream.close()
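# get_corners() is used above but not shown in this section. A minimal sketch of
# the assumed behavior, using GDAL/OSR with EPSG codes for the two coordinate
# systems (e.g. 4326 for WGS84, 5070 for Conus Albers); the real implementation
# and the types of Region.wgs84/Region.albers may differ.
from osgeo import osr


def get_corners(fromEPSG, toEPSG, xmax, xmin, ymax, ymin):
    """Transform the four corners of a bounding box and return
    [xmax, xmin, ymax, ymin] in the target coordinate system."""
    fromCS = osr.SpatialReference()
    fromCS.ImportFromEPSG(fromEPSG)
    toCS = osr.SpatialReference()
    toCS.ImportFromEPSG(toEPSG)
    transform = osr.CoordinateTransformation(fromCS, toCS)
    # NB: assumes GDAL 2.x axis order (x, y); GDAL 3 defaults to authority order.
    corners = [transform.TransformPoint(x, y)
               for x in (xmin, xmax) for y in (ymin, ymax)]
    xs = [corner[0] for corner in corners]
    ys = [corner[1] for corner in corners]
    return [max(xs), min(xs), max(ys), min(ys)]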
def main(): """Builds a region.""" # example: # ./BuildRegion.py --name BlockIsland # parse options and get results parser = argparse.ArgumentParser(description='Builds Minecraft worlds from regions.') parser.add_argument('--name', required=True, type=str, \ help='name of the region to be built') parser.add_argument('--single', action='store_true', \ help='enable single-threaded mode for debugging or profiling') parser.add_argument('--safemerge', action='store_true', \ help='use \"safer\" method of merging tiles together') parser.add_argument("-v", "--verbosity", action="count", \ help="increase output verbosity") parser.add_argument("-q", "--quiet", action="store_true", \ help="suppress informational output") args = parser.parse_args() # set up logging log_level = klog_levels.LOG_INFO if args.quiet: log_level = klog_levels.LOG_ERROR if args.verbosity: # v=1 is DEBUG 1, v=2 is DEBUG 2, and so on log_level += args.verbosity log = klogger(log_level) # build the region log.log_info("Building region %s..." % args.name) yamlfile = file(os.path.join('Regions', args.name, 'Region.yaml')) myRegion = yaml.load(yamlfile) yamlfile.close() # exit if map does not exist if not os.path.exists(myRegion.mapname): log.log_fatal("No map file exists!") # tree and ore variables treeobjs = dict([(tree.name, tree) for tree in treeObjs]) trees = dict([(name, list()) for name in treeobjs]) oreobjs = dict([(ore.name, ore) for ore in oreObjs]) ores = dict([(name, list()) for name in oreobjs]) # generate overall world worlddir = os.path.join('Worlds', args.name) world = mclevel.MCInfdevOldLevel(worlddir, create=True) peak = [0, 0, 0] save(world) world = None # generate individual tiles tilexrange = xrange(myRegion.tiles['xmin'], myRegion.tiles['xmax']) tileyrange = xrange(myRegion.tiles['ymin'], myRegion.tiles['ymax']) name = myRegion.name tiles = [(log, name, x, y) for x, y in product(tilexrange, tileyrange)] if args.single: # single process version log.log_warn("Single-threaded region merge") for tile in tiles: buildtile(tile) else: # multi-process version pool = Pool() rs = pool.map_async(buildtile, tiles) pool.close() while not(rs.ready()): remaining = rs._number_left log.log_info("Waiting for %s buildtile tasks to complete..." % remaining) time.sleep(10) pool.join() # Just as a precaution. # Necessary for tile-welding -> regions cleanmkdir(worlddir) cleanmkdir(os.path.join(worlddir, 'region')) # Generate regions if not(args.safemerge): regionsize = 32 * 16 regionxrange = xrange(int(floor(myRegion.tiles['xmin'] * (myRegion.tilesize / float(regionsize)))), \ int(ceil(myRegion.tiles['xmax'] * (myRegion.tilesize / float(regionsize))))) regionyrange = xrange(int(floor(myRegion.tiles['ymin'] * (myRegion.tilesize / float(regionsize)))), \ int(ceil(myRegion.tiles['ymax'] * (myRegion.tilesize / float(regionsize))))) regions = [(log, name, x, y) for x, y in product(regionxrange, regionyrange)] # merge individual tiles into regions log.log_info("Merging %d tiles into one world..." % len(tiles)) for tile in tiles: (dummy, name, x, y) = tile tiledir = os.path.join('Regions', name, 'Tiles', '%dx%d' % (x, y)) if not(os.path.isfile(os.path.join(tiledir, 'Tile.yaml'))): log.log_fatal("The following tile is missing. 
Please re-run this script:\n%s" % \ os.path.join(tiledir, 'Tile.yaml')) if args.single: # single process version log.log_warn("Single-threaded region merge") for region in regions: buildregion(region) else: # multi-process version pool = Pool() rs = pool.map_async(buildregion, regions) pool.close() while not(rs.ready()): remaining = rs._number_left log.log_info("Waiting for %s buildregion tasks to complete..." % remaining) time.sleep(10) pool.join() # Just as a precaution. world = mclevel.MCInfdevOldLevel(worlddir, create=True) if not(args.safemerge): mcoffsetx = myRegion.tiles['xmin'] * myRegion.tilesize mcoffsetz = myRegion.tiles['ymin'] * myRegion.tilesize mcsizex = (myRegion.tiles['xmax'] - myRegion.tiles['xmin']) * myRegion.tilesize mcsizez = (myRegion.tiles['ymax'] - myRegion.tiles['ymin']) * myRegion.tilesize tilebox = box.BoundingBox((mcoffsetx, 0, mcoffsetz), (mcsizex, world.Height, mcsizez)) world.createChunksInBox(tilebox) for tile in tiles: (dummy, name, x, y) = tile tiledir = os.path.join('Regions', name, 'Tiles', '%dx%d' % (x, y)) tilefile = file(os.path.join(tiledir, 'Tile.yaml')) newtile = yaml.load(tilefile) tilefile.close() if (newtile.peak[1] > peak[1]): peak = newtile.peak for treetype in newtile.trees: trees.setdefault(treetype, []).extend(newtile.trees[treetype]) if myRegion.doOre: for oretype in newtile.ores: ores.setdefault(oretype, []).extend(newtile.ores[oretype]) if args.safemerge: tileworld = mclevel.MCInfdevOldLevel(tiledir, create=False) world.copyBlocksFrom(tileworld, tileworld.bounds, tileworld.bounds.origin) tileworld = False # plant trees in our world log.log_info("Planting %d trees at the region level..." % \ sum([len(trees[treetype]) for treetype in trees])) Tree.placetreesinregion(trees, treeobjs, world) # deposit ores in our world if myRegion.doOre: log.log_info("Depositing %d ores at the region level..." % \ sum([len(ores[oretype]) for oretype in ores])) Ore.placeoreinregion(ores, oreobjs, world) # tie up loose ends world.setPlayerGameType(1) setspawnandsave(world, peak) oldyamlpath = os.path.join('Regions', args.name, 'Region.yaml') newyamlpath = os.path.join('Worlds', args.name, 'Region.yaml') shutil.copy(oldyamlpath, newyamlpath) shutil.rmtree(os.path.join('Regions', name, 'Tiles'))
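# Standalone illustration (hedged) of the Pool/map_async polling pattern used in
# main() above, with a trivial worker standing in for buildtile()/buildregion().
from multiprocessing import Pool
import time


def work(task):
    """Trivial stand-in for buildtile()/buildregion()."""
    (name, x, y) = task
    return '%s %dx%d' % (name, x, y)


if __name__ == '__main__':
    tasks = [('Example', x, y) for x in xrange(2) for y in xrange(2)]
    pool = Pool()
    rs = pool.map_async(work, tasks)
    pool.close()
    while not rs.ready():
        # _number_left is an implementation detail of AsyncResult, as in the
        # code above; it may change between Python versions.
        print "Waiting for %s tasks to complete..." % rs._number_left
        time.sleep(1)
    pool.join()
    print rs.get()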