def start_ssh(self, url):
    """
    Start an SSH connection.

    :param url:
    :return:
    """
    # use the current user if none was passed in
    if self.ssh_user is None:
        self.ssh_user = getpass.getuser()
    # if we haven't already started this connection, start it
    if url not in self.connections:
        try:
            # paramiko.util.log_to_file("paramiko.log")
            ssh = paramiko.SSHClient()
            ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
            ssh.allow_agent = False
            ssh.connect(url, username=self.ssh_user, password=self.pw)
        except Exception as e:
            out(0, red("SSH connection to {} failed: ".format(url)))
            print(e)
            return False
        # add this connection to the list of open connections
        self.connections[url] = ssh
    # set this URL as the current connection key, used by exec_shell
    self.current_conn_key = url
    return True
def update_server(self, ssh_alias=None, url=None, git_user='******'):
    """
    Update an individual server.

    :param ssh_alias:
    :param url:
    :param git_user:
    :return:
    """
    # run this file on the desired server
    command = "python -u " + self.gpull_local_location
    if ssh_alias is not None:
        # start a remote connection to the server
        command += " -u {} -e {} -n '{}' ".format(git_user, self.email_to, self.ssh_user)
        if self.start_ssh(url) is False:
            # failed connection, so don't continue updating directories
            return False
    # add path:
    command += " -p {}".format(' '.join(self.dir))
    # run through the optional flags (branch, force, all dirs)
    if self.branch is not None:
        command += " -b {}".format(self.branch)
    if self.force:
        command += " -f "
    if self.all_dirs:
        command += " -a "
    out(0, green("running git updates on " + url))
    out(0, self.exec_shell(command))
def get_maps(path):
    min_x = None
    min_z = None
    max_x = None
    max_z = None
    map_files = os.listdir(path)
    maps = []
    for filename in map_files:
        if filename.find("map_") == 0:
            map = MCMap(path + filename)
            scale = 2 ** map.scale
            real_width = map.width * scale
            real_height = map.height * scale
            left = map.xCenter - (real_width / 2)
            top = map.zCenter - (real_height / 2)
            right = map.xCenter + (real_width / 2)
            bottom = map.zCenter + (real_height / 2)
            if min_x is None or left < min_x:
                min_x = left
            if min_z is None or top < min_z:
                min_z = top
            if max_x is None or right > max_x:
                max_x = right
            if max_z is None or bottom > max_z:
                max_z = bottom
            maps.append(map)
    out("Loaded " + str(len(maps)) + " maps.\n")
    return (min_x, min_z, max_x, max_z, maps)
def exec_shell(self, command):
    """
    Execute a shell command and get the output.

    :param command: script command
    :return: string | False
    """
    if self.current_conn_key:
        # run remotely over the current SSH connection, piping the password to sudo
        ssh = self.connections[self.current_conn_key]
        encoded = pipes.quote(self.pw)
        sudo_cmd = "echo {pw} | sudo -S ".format(pw=encoded)
        stdin, stdout, stderr = ssh.exec_command(sudo_cmd + command, get_pty=True)
        out(0, stdout.read())
        if stderr:
            for line in stderr.readlines():
                line = line.strip()
                # ignore sudo password prompts
                if '[sudo] password for' not in line:
                    out(0, line)
    else:
        # no open connection, so run locally
        try:
            # try to run the process, or return an error
            process = subprocess.Popen(shlex.split(command), bufsize=0,
                                       stdout=subprocess.PIPE,
                                       stderr=subprocess.STDOUT)
            stdout, stderr = process.communicate()
            if stdout is not None:
                return stdout
        except subprocess.CalledProcessError as e:
            print("Could not finish your request: " + e.output.decode('UTF-8'))
            return False
    return ''
def execute(processCount):
    n = 10000000  # 100 times fewer than C due to speed issues.
    delta = 1.0 / n
    startTime = time()
    with Pool(processes=processCount) as pool:
        pi = 4.0 * delta * sum(pool.imap(partial(f, delta=delta), range(1, n),
                                         n // processCount))
    elapseTime = time() - startTime
    out(__file__, pi, n, elapseTime, processCount)
def execute(processCount):
    n = 10000000  # 100 times fewer than C due to speed issues.
    delta = 1.0 / n
    startTime = time()
    with Pool(processes=processCount) as pool:
        pi = 4.0 * delta * sum(pool.map(partial(f, delta=delta), range(1, n)))
    elapseTime = time() - startTime
    out(__file__, pi, n, elapseTime, processCount)
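# The two Pool variants above map a function f over the index range via
# partial(f, delta=delta), but f itself is not part of this collection. A
# minimal sketch, assuming f evaluates the midpoint-rule integrand for
# pi = integral of 4 / (1 + x^2) over [0, 1] (the factor 4 * delta is
# applied by the caller):


def f(i, delta):
    x = (i - 0.5) * delta
    return 1.0 / (1.0 + x * x)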
def execute(threadCount):
    n = 100000000  # 10 times fewer than C due to speed issues.
    delta = 1.0 / n
    startTime = time()
    set_num_threads(threadCount)
    value = arange(n, dtype=double)
    pi = 4.0 * delta * evaluate('1.0 / (1.0 + ((value - 0.5) * delta) ** 2)').sum()
    elapseTime = time() - startTime
    out(__file__, pi, n, elapseTime, threadCount)
def execute(processCount):
    n = 1000000000
    delta = 1.0 / n
    startTime = time()
    sliceSize = n // processCount
    with Pool(processes=processCount) as pool:
        results = [pool.apply_async(processSlice, args=(i, sliceSize, delta))
                   for i in range(0, processCount)]
        pi = 4.0 * delta * sum(item.get() for item in results)
    elapseTime = time() - startTime
    out(__file__, pi, n, elapseTime, processCount)
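# Several variants above and below hand a processSlice function to worker
# processes, but its definition is not included in this collection. A minimal
# sketch, assuming each worker sums a contiguous slice of midpoint-rule terms
# and returns the partial sum:


def processSlice(id, sliceSize, delta):
    total = 0.0
    for i in range(1 + id * sliceSize, (id + 1) * sliceSize + 1):
        x = (i - 0.5) * delta
        total += 1.0 / (1.0 + x * x)
    return total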
def execute(processCount):
    n = 1000000000
    delta = 1.0 / n
    startTime = time()
    # integer division: a fractional sliceSize would break the workers' range()
    sliceSize = n // processCount
    pool = Pool(processes=processCount)
    results = [pool.apply_async(processSlice, args=(i, sliceSize, delta))
               for i in range(0, processCount)]
    pi = 4.0 * delta * sum(item.get() for item in results)
    elapseTime = time() - startTime
    out(__file__, pi, n, elapseTime, processCount)
def execute(processCount):
    n = 10000000  # 100 times fewer than C due to speed issues.
    delta = 1.0 / n
    startTime = time()
    sliceSize = n // processCount
    with Pool(processes=processCount) as pool:
        results = [pool.apply_async(processSlice, args=(i, sliceSize, delta))
                   for i in range(0, processCount)]
        pi = 4.0 * delta * sum(item.get() for item in results)
    elapseTime = time() - startTime
    out(__file__, pi, n, elapseTime, processCount)
def execute(processCount):
    n = 1000000000
    delta = 1.0 / n
    startTime = time()
    sliceSize = n // processCount
    with ProcessPoolExecutor(max_workers=processCount) as executor:
        results = [executor.submit(processSlice, i, sliceSize, delta)
                   for i in range(processCount)]
        pi = 4.0 * delta * sum(item.result() for item in results)
    elapseTime = time() - startTime
    out(__file__, pi, n, elapseTime, processCount)
def execute(processCount):
    n = 10000000  # 100 times fewer than C due to speed issues.
    delta = 1.0 / n
    startTime = time()
    sliceSize = n / processCount
    server = Server(secret='blahblahblah')
    jobs = [server.submit(processSlice, (i, sliceSize, delta))
            for i in xrange(0, processCount)]
    pi = 4.0 * delta * sum(job() for job in jobs)
    elapseTime = time() - startTime
    out(__file__, pi, n, elapseTime, processCount)
    server.print_stats()
def execute(processCount):
    n = 1000000000
    delta = 1.0 / n
    startTime = time()
    sliceSize = n // processCount
    resultsQueue = Queue()
    processes = [Process(target=processSlice, args=(i, sliceSize, delta, resultsQueue))
                 for i in range(0, processCount)]
    for p in processes:
        p.start()
    pi = 4.0 * delta * sum(resultsQueue.get() for i in range(0, processCount))
    elapseTime = time() - startTime
    out(__file__, pi, n, elapseTime, processCount)
def execute(threadCount):
    n = 1000000000
    delta = 1.0 / n
    startTime = time()
    sliceSize = n / threadCount
    results = Queue(threadCount)
    threads = [Thread(target=processSlice, args=(i, sliceSize, delta, results))
               for i in xrange(0, threadCount)]
    for thread in threads:
        thread.start()
    pi = 4.0 * delta * sum(results.get() for i in xrange(threadCount))
    elapseTime = time() - startTime
    out(__file__, pi, n, elapseTime, threadCount)
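# The Process- and Thread-based variants pass a results queue into
# processSlice instead of collecting return values. A minimal sketch of that
# flavor, again an assumption since the definition is not included here:


def processSlice(id, sliceSize, delta, results):
    # sum this worker's slice and hand the partial sum back via the queue
    total = 0.0
    for i in range(1 + id * sliceSize, (id + 1) * sliceSize + 1):
        x = (i - 0.5) * delta
        total += 1.0 / (1.0 + x * x)
    results.put(total)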
def execute(threadCount):
    n = 10000000  # 100 times fewer than C due to speed issues.
    delta = 1.0 / n
    startTime = time()
    sliceSize = n // threadCount
    global results
    results = Queue(threadCount)
    threads = [Thread(target=processSlice, args=(i, sliceSize, delta))
               for i in range(0, threadCount)]
    for thread in threads:
        thread.start()
    pi = 4.0 * delta * sum(results.get() for i in range(threadCount))
    elapseTime = time() - startTime
    out(__file__, pi, n, elapseTime, threadCount)
def main(url=None):
    if not sys.stdin.isatty() and url is None:
        for line in sys.stdin.readlines():
            line = line.replace("\n", "")
            main(line)  # recursively run every line
        return 0        # and then exit
    parser = argparse.ArgumentParser(description="Simple HTTPLib front-end",
                                     conflict_handler='resolve')
    parser.add_argument("path", nargs="?", default="/", type=str,
                        help="GET [path]", metavar='path')
    parser.add_argument("host", nargs="?", default="www.instagram.com", type=str,
                        help="hostname", metavar='hostname')
    parser.add_argument("--host", dest="host2", nargs="?", default=None, type=str,
                        help="overrides the other hostname argument",
                        metavar="hostname")
    parser.add_argument("-v", action="count", default=0, help="verbosity")
    parser.add_argument("-t", "--test", action='store_true', default=False,
                        help="only request HEAD")
    args = parser.parse_args()
    if args.host2 is not None:
        args.host = args.host2
    delattr(args, 'host2')
    if url is not None:
        args.path = url
    if args.v >= 3:
        print args
    if args.test:
        method = "HEAD"
    else:
        method = "GET"
    # finally, make the connection
    if not args.v:
        try:
            data = connect(path=args.path, host=args.host, method=method)
            output.out(data)
            return 0
        except AssertionError as e:
            output.err_out(e)
            return 1
    else:
        resp = get_response(path=args.path, host=args.host, method=method)
        print "%s%s - %s %s" % (args.host, args.path, resp.status, resp.reason)
        if args.v >= 2:
            for header in resp.getheaders():
                print "%s: %s" % header
        output.out(resp.read())
        return 0
def execute(threadCount):
    n = 10000000  # 100 times fewer than C due to speed issues.
    delta = 1.0 / n
    startTime = time()
    sliceSize = n // threadCount
    global results
    results = []
    threads = [Thread(target=processSlice, args=(i, sliceSize, delta))
               for i in range(0, threadCount)]
    for thread in threads:
        thread.start()
    for thread in threads:
        thread.join()
    pi = 4.0 * delta * sum(results)
    elapseTime = time() - startTime
    out(__file__, pi, n, elapseTime, threadCount)
def execute(threadCount):
    n = 1000000000
    delta = 1.0 / n
    startTime = time()
    sliceSize = n // threadCount
    results = Queue(threadCount)
    threads = [Thread(target=processSlice, args=(i, sliceSize, delta, results))
               for i in range(0, threadCount)]
    for thread in threads:
        thread.start()
    pi = 4.0 * delta * sum(results.get() for i in range(threadCount))
    elapseTime = time() - startTime
    out(__file__, pi, n, elapseTime, threadCount)
def execute(workerCount):
    n = 1000000000
    delta = 1.0 / n
    startTime = time()
    sliceSize = n // workerCount
    context = create_some_context()
    queue = CommandQueue(context)
    with open('processSlice_opencl.cl', 'r') as f:
        kernel = Program(context, f.read()).build()
    results = numpy.array(range(workerCount), dtype=numpy.float64)
    buffer = Buffer(context, mem_flags.WRITE_ONLY, results.nbytes)
    kernel.processSlice(queue, results.shape, None,
                        numpy.int32(sliceSize), numpy.float64(delta), buffer)
    enqueue_read_buffer(queue, buffer, results).wait()
    pi = 4.0 * delta * numpy.sum(results)
    elapseTime = time() - startTime
    out(__file__, pi, n, elapseTime)
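# The OpenCL variant reads its kernel from processSlice_opencl.cl, which is
# not included in this collection. A plausible sketch of that file's contents,
# shown here as a Python string for reference (one work-item per slice,
# matching the host's processSlice(queue, results.shape, None, sliceSize,
# delta, buffer) call; the argument order and double-precision pragma are
# assumptions):

KERNEL_SOURCE_SKETCH = """
#pragma OPENCL EXTENSION cl_khr_fp64 : enable

__kernel void processSlice(const int sliceSize, const double delta,
                           __global double *results) {
    const int id = get_global_id(0);
    double total = 0.0;
    for (int i = 1 + id * sliceSize; i <= (id + 1) * sliceSize; ++i) {
        const double x = (i - 0.5) * delta;
        total += 1.0 / (1.0 + x * x);
    }
    results[id] = total;
}
"""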
def git_merge_all(self, from_branch, to_branch, working_path='/var/release'):
    """
    Merge all Git repositories from one branch into another.

    :param from_branch: the branch to merge from
    :param to_branch: the branch to merge into
    :param working_path:
    :return:
    """
    if not os.path.exists(working_path):
        # if the path doesn't exist, create it
        os.mkdir(working_path)
    os.chdir(working_path)
    for repo in self.config.repositories:
        os.chdir(working_path)
        out(1, blue("\n------- REPO: " + repo + " -------"))
        # see if the repo exists locally; clone it if not
        path = working_path + '/' + repo
        output = ''
        try:
            if not os.path.exists(path):
                output += self.exec_shell('git clone ' + self.git_server + '/' + repo + '.git ' + path)
                if 'Access denied.' in output:
                    out(2, yellow('skipped'))
                    continue
            os.chdir(path)
            output += self.exec_shell('git reset --hard HEAD')
            output += self.exec_shell('git checkout --force {}'.format(from_branch))
            output += self.exec_shell('git pull')
            output += self.exec_shell('git checkout --force {}'.format(to_branch))
            output += self.exec_shell('git pull')
            output += self.exec_shell('git merge {}'.format(from_branch))
            output += self.exec_shell('git push origin {}'.format(to_branch))
            for line in output.splitlines(True):
                if line.startswith('error') or line.startswith('CONFLICT'):
                    out(2, red(line))
                else:
                    out(2, green(line))
        except Exception as e:
            out(2, red('Error: '))
            out(2, red(output))
            out(2, red(e))
            return False
    return output
#!/usr/bin/python2
import output

if __name__ == "__main__":
    output.out('line')
def accumulator(channel, n, delta, startTime, processCount):
    pi = 4.0 * delta * sum(channel() for i in xrange(0, processCount))
    elapseTime = time() - startTime
    out(__file__, pi, n, elapseTime, processCount)
def accumulator(channel, n, delta, startTime, processCount):
    pi = 4.0 * delta * sum(channel.read() for i in range(0, processCount))
    elapseTime = time() - startTime
    out(__file__, pi, n, elapseTime, processCount)
def accumulator():
    # relies on channels, delta, n, startTime, and processCount being
    # visible in the enclosing scope
    pi = 4.0 * delta * sum(channel.read() for channel in channels)
    elapseTime = time() - startTime
    out(__file__, pi, n, elapseTime, processCount)
def build(config):
    world_path = config['worldpath']
    output_path = config['outputpath']
    region_path = world_path + "/region/"
    if config['usemask']:
        (min_x, min_z, max_x, max_z, maps) = get_maps(world_path + "/data/")
        if len(maps) < 1:
            out("No maps to build from.\n")
            return
        combined_width = max_x - min_x
        combined_height = max_z - min_z
        out("Combined map: %d x %d (%d,%d)-(%d,%d)\n" %
            (combined_width, combined_height, min_x, min_z, max_x, max_z))
        map_data = [0] * (combined_width * combined_height)
        regions_with_data = []
        for map in maps:
            scale = 2 ** map.scale
            x = map.xCenter - (map.width * scale / 2) - min_x
            z = map.zCenter - (map.height * scale / 2) - min_z
            for cz in range(0, map.height):
                for cx in range(0, map.width):
                    c = map.colors[cx + cz * map.width]  # color_map[map.colors[cx + cz * map.width]]
                    if c > 3:
                        for row in range(0, scale):
                            for col in range(0, scale):
                                block_x = x + cx * scale + col
                                block_z = z + cz * scale + row
                                map_data[block_x + block_z * combined_width] = 1
                                # TODO write True into a region look up for
                                # "are there pixels here"
            out(".")
        out("\n")
    else:
        out("Ignore maps; building full sized combined map\n")
        region_files = os.listdir(region_path)
        region_files.sort(key=lambda x: int(x.split(".")[1]) + int(x.split(".")[2]) / 10000)
        map_data = None
        min_x = 0
        min_z = 0
        max_x = 0
        max_z = 0
        for f in region_files:
            chnk = f.split('.')
            left = int(chnk[1]) * 512
            top = int(chnk[2]) * 512
            right = left + 511
            bottom = top + 511
            if left < min_x:
                min_x = left
            if top < min_z:
                min_z = top
            if right > max_x:
                max_x = right
            if bottom > max_z:
                max_z = bottom
    region_files = os.listdir(region_path)
    out(str(len(region_files)) + " regions to check.\n")
    # TODO check for tile directory existence, create it if necessary
    if os.path.exists(output_path + '/tile/markers.json'):
        markers = json.load(open(output_path + '/tile/markers.json'))
        out("Loaded markers.json\n")
    else:
        markers = {}
        out("Creating markers.json\n")
    if os.path.exists(output_path + '/tile/tiles.json'):
        tiles = json.load(open(output_path + '/tile/tiles.json'))
        out("Loaded tiles.json\n")
    else:
        tiles = {}
        out("Creating tiles.json\n")
    # TODO start using a hints.json which stores whatever; specifically I can
    # start storing the southern edge of height maps so I can calculate the
    # shading on the next tile to the south
    # hints = []
    # assumes at most 10000 regions square; that's an area of roughly 5120 km
    # to a side so it's probably okay for most worlds
    region_files.sort(key=lambda x: int(x.split(".")[1]) + int(x.split(".")[2]) / 10000)
    out("Region files sorted.\n")
    tile_generator = TileGenerator(config, map_data, make_colors(),
                                   min_x, min_z, max_x, max_z)
    for f in region_files:
        out(" " + f + ": ")
        f_parts = f.split(".")
        tx = f_parts[1]
        ty = f_parts[2]
        output_f = output_path + "/tile/tile." + tx + "." + ty + ".png"
        tiles[f] = {'src': "tile/tile." + tx + "." + ty + ".png",
                    'x': int(tx) * 512,
                    'y': int(ty) * 512}
        if (int(tx) * 512 > max_x or int(ty) * 512 > max_z or
                (int(tx) + 1) * 512 < min_x or (int(ty) + 1) * 512 < min_z):
            out(" Region is entirely outside of mapped area. Skipping.\n")
            continue
        if (not config['forcebuild'] and os.path.exists(output_f) and
                os.path.getmtime(output_f) > os.path.getmtime(region_path + f)):
            out(" Tile exists and is newer than the region file. Skipping.\n")
            continue
        region = AnvilRegion(region_path + f)
        out(" " + str(len(region.chunks)) + " chunks loaded")
        (tile_im, m) = tile_generator.makeTile(region)
        markers[f] = m
        tile_im.save(output_f)
        out("\n")
    f = open(output_path + "/tile/markers.json", "w")
    json.dump(markers, f)
    out("Wrote markers.json\n")
    f = open(output_path + "/tile/tiles.json", "w")
    json.dump(tiles, f)
    out("Wrote tiles.json\n")
def out():
    ret = output.out()
    return str(ret)
def build(world_path, output_path):
    (min_x, min_z, max_x, max_z, maps) = get_maps(world_path + "/data/")
    if len(maps) < 1:
        out("No maps to build from.\n")
        return
    combined_width = max_x - min_x
    combined_height = max_z - min_z
    out("Combined map: %d x %d (%d,%d)-(%d,%d)\n" %
        (combined_width, combined_height, min_x, min_z, max_x, max_z))
    map_data = [0] * (combined_width * combined_height)
    regions_with_data = []
    for map in maps:
        scale = 2 ** map.scale
        x = map.xCenter - (map.width * scale / 2) - min_x
        z = map.zCenter - (map.height * scale / 2) - min_z
        for cz in range(0, map.height):
            for cx in range(0, map.width):
                c = map.colors[cx + cz * map.width]  # color_map[map.colors[cx + cz * map.width]]
                if c > 3:
                    for row in range(0, scale):
                        for col in range(0, scale):
                            block_x = x + cx * scale + col
                            block_z = z + cz * scale + row
                            map_data[block_x + block_z * combined_width] = 1
                            # TODO write True into a region look up for
                            # "are there pixels here"
        out(".")
    out("\n")
    path = world_path + "/region/"
    region_files = os.listdir(path)
    out(str(len(region_files)) + " regions to check.\n")
    if os.path.exists(output_path + '/tile/markers.json'):
        markers = json.load(open(output_path + '/tile/markers.json'))
        out("Loaded markers.json\n")
    else:
        markers = {}
        out("Creating markers.json\n")
    if os.path.exists(output_path + '/tile/tiles.json'):
        tiles = json.load(open(output_path + '/tile/tiles.json'))
        out("Loaded tiles.json\n")
    else:
        tiles = {}
        out("Creating tiles.json\n")
    # TODO start using a hints.json which stores whatever; specifically I can
    # start storing the southern edge of height maps so I can calculate the
    # shading on the next tile to the south
    hints = []
    # assumes at most 10000 regions square; that's an area of roughly 5120 km
    # to a side so it's probably okay for most worlds
    # TODO this sorting actually doesn't really work; it reverses negative y
    region_files.sort(key=lambda x: int(x.split(".")[1]) + float(x.split(".")[2]) / 10000)
    out("Region files sorted.\n")
    tile_generator = TileGenerator(map_data, make_colors(), min_x, min_z, max_x, max_z)
    for f in region_files:
        out(" " + f + ": ")
        f_parts = f.split(".")
        tx = f_parts[1]
        ty = f_parts[2]
        output_f = output_path + "/tile/tile." + tx + "." + ty + ".png"
        tiles[f] = {'src': "tile/tile." + tx + "." + ty + ".png",
                    'x': int(tx) * 512,
                    'y': int(ty) * 512}
        if (int(tx) * 512 > max_x or int(ty) * 512 > max_z or
                (int(tx) + 1) * 512 < min_x or (int(ty) + 1) * 512 < min_z):
            out(" Region is entirely outside of mapped area. Skipping.\n")
            continue
        if (os.path.exists(output_f) and
                os.path.getmtime(output_f) > os.path.getmtime(path + f)):
            out(" Tile exists and is newer than the region file. Skipping.\n")
            continue
        region = AnvilRegion(path + f)
        out(" " + str(len(region.chunks)) + " chunks loaded")
        (tile_im, m) = tile_generator.makeTile(region)
        markers[f] = m
        tile_im.save(output_f)
        out("\n")
    f = open(output_path + "/tile/markers.json", "w")
    json.dump(markers, f)
    out("Wrote markers.json\n")
    f = open(output_path + "/tile/tiles.json", "w")
    json.dump(tiles, f)
    out("Wrote tiles.json\n")
#! /usr/bin/env python
# -*- coding:utf-8; -*-

# Calculation of π using quadrature. Sequential algorithm using reduce.
#
# Copyright © 2008–2012 Russel Winder

from output import out
from time import time

if __name__ == '__main__':
    n = 10000000  # 100 times fewer than C due to speed issues.
    delta = 1.0 / n
    startTime = time()
    pi = 4.0 * delta * reduce(lambda s, i: s + 1.0 / (1.0 + ((i - 0.5) * delta) ** 2), xrange(n), 0.0)
    elapseTime = time() - startTime
    out(__file__, pi, n, elapseTime)
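# For reference, the same sequential reduce in Python 3, where reduce moved
# to functools and xrange is gone (plain print stands in for the collection's
# out helper):

from functools import reduce
from time import time

n = 10000000
delta = 1.0 / n
startTime = time()
pi = 4.0 * delta * reduce(
    lambda s, i: s + 1.0 / (1.0 + ((i - 0.5) * delta) ** 2),
    range(n), 0.0)
print(__file__, pi, n, time() - startTime)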
        elif int(cont) == 2:
            break
        break
    elif int(choice) == 3:
        dt = cutting.cut(df14_20)
        prev = df14_20
        df14_20 = dt
        print("Do you want to perform another operation?")
        cont = input("Yes: 1, No: 2 ----> ")
        if int(cont) == 1:
            continue
        elif int(cont) == 2:
            break
        break
    elif int(choice) == 4:
        output.out(df14_20)
        print("Do you want to perform another operation?")
        cont = input("Yes: 1, No: 2 ----> ")
        if int(cont) == 1:
            continue
        elif int(cont) == 2:
            break
        break
    elif int(choice) == 5:
        pd.set_option("display.max_rows", 1000)
        print(df14_20)
        break
    else:
        print("Invalid input. Please try again.")
        continue
#! /usr/bin/env python3

# Calculation of π using quadrature. Sequential algorithm using Cython.
#
# Copyright © 2008–2012, 2014, 2015 Russel Winder

from output import out
from time import time

from processAll_extension_cython import sequential

if __name__ == '__main__':
    n = 1000000000
    delta = 1.0 / n
    startTime = time()
    pi = sequential(n, delta)
    elapseTime = time() - startTime
    out(__file__, pi, n, elapseTime)
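# The Cython extension's sequential(n, delta) is not included in this
# collection; a pure-Python sketch of what it presumably computes (the same
# midpoint-rule quadrature as the other variants, returning pi directly since
# the caller applies no further scaling):


def sequential(n, delta):
    total = 0.0
    for i in range(1, n + 1):
        x = (i - 0.5) * delta
        total += 1.0 / (1.0 + x * x)
    return 4.0 * delta * total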
def makeTile(self, region):
    tile_im = Image.new("RGBA", (512, 512), color=(0, 0, 0, 0))
    markers = []
    counter = 0
    heights = [None] * (16 * 16 * 32 * 32)
    for ch in region.chunks:
        counter = counter + 1
        if counter % 64 == 0:
            out(".")
        if ch[1] and isinstance(ch[1].value, dict) and ch[1].value and 'Level' in ch[1].value:
            data = ch[1]['Level']
            worldX = data['xPos'].value * 16
            worldZ = data['zPos'].value * 16
            if 'Sections' in data.value and 'HeightMap' in data.value:
                sections = [None] * 16
                for s in data['Sections'].value:
                    sections[s['Y'].value] = s
                for x in range(16):
                    for z in range(16):
                        mapX = worldX + x - self.world_offset[0]
                        mapY = worldZ + z - self.world_offset[1]
                        tileX = (worldX % 512) + x
                        tileZ = (worldZ % 512) + z
                        if (mapX >= 0 and mapX < self.combined_width and
                                mapY >= 0 and mapY < self.combined_height and
                                self.mask_data[mapX + (mapY * self.combined_width)] != 0):
                            # scan down from just above the recorded height until an
                            # opaque block (or the bottom of the world) is found
                            h = data['HeightMap'].value[x + z * 16] + 2
                            cl = 0
                            water_depth = 0
                            light = 0.5
                            while (cl == 0 or water_depth > 0) and h >= 0:
                                h = h - 1
                                sect = int(h / 16)
                                if sections[sect]:
                                    blockIndex = x + z * 16 + (h % 16) * 256
                                    bl = sections[sect]['Blocks'].value[blockIndex]
                                    if sections[sect]['Add'] != None:
                                        addValue = self.get_nibble(sections[sect]['Add'].value, blockIndex)
                                        bl = bl + (addValue << 8)
                                    if bl == 8 or bl == 9:  # water handling
                                        water_depth += 1
                                    elif bl == 10 or bl == 11:  # lava handling
                                        cl = self.colors['block'][bl]
                                        light = 1
                                    else:
                                        if water_depth > 0:
                                            if water_depth < 12 or (water_depth < 24 and (mapX + mapY) % 2):
                                                cl = self.colors['water'][0]
                                            else:
                                                cl = self.colors['water'][1]
                                            water_depth = 0
                                        else:
                                            cl = self.colors['block'][bl]
                                            if not bl:
                                                light = 0.50 + (self.get_nibble(sections[sect]['BlockLight'].value, blockIndex) * 0.034)
                                            if bl in [31, 32, 37, 38, 39, 40, 78]:
                                                # plants and snow layers: color from the block below
                                                h = h - 1
                            heights[tileX * 512 + tileZ] = h
                            if h == 0 and cl == 0:
                                out("!!" + str(x) + "," + str(z) + "!!")
                            if cl < 0:
                                # negative color ids index the tint tables via the
                                # block's data value
                                dataValue = self.get_nibble(sections[sect]['Data'].value, blockIndex)
                                cl = self.colors['tint_colors'][-cl][dataValue]
                            color = self.colors['actual'][cl]
                            # shade by comparing against the height of the block to the north
                            north = heights[tileX * 512 + (tileZ - 1)] if tileZ > 0 else None
                            if north and north > h:
                                color = self.colors['dark'][cl]
                            elif north and north < h:
                                color = self.colors['bright'][cl]
                            tile_im.putpixel((tileX, tileZ),
                                             (int(color[0] * light), int(color[1] * light), int(color[2] * light)))
            if 'TileEntities' in data.value and len(data['TileEntities'].value) > 0:
                for e in data['TileEntities'].value:
                    if e['id'].value == 'Sign':
                        x = e["x"].value - self.world_offset[0]
                        z = e["z"].value - self.world_offset[1]
                        # strip the " characters off the beginning and end of each line
                        text1 = e["Text1"].value[1:-1]
                        text2 = e["Text2"].value[1:-1]
                        text3 = e["Text3"].value[1:-1]
                        text4 = e["Text4"].value[1:-1]
                        if x > 0 and x < self.combined_width - 1 and z > 0 and z < self.combined_height - 1:
                            # the first line of a sign selects the marker type
                            marker_types = {
                                '^^': 'portal', '(R)': 'ruins', '(M)': 'mine',
                                '(K)': 'keep', '(T)': 'tower', '(F)': 'farm',
                                '[*]': 'square', '(*)': 'circle', '((*))': 'big_circle',
                            }
                            if text1 in ('X', 'x', '(X)', '(x)'):
                                marker_type = 'x'
                            else:
                                marker_type = marker_types.get(text1)
                            if marker_type:
                                # remaining lines build the label
                                label = text2
                                if text3 != "":
                                    label = label + " " + text3
                                if text4 != "" and text4 != text1:
                                    label = label + " " + text4
                                markers.append({"x": e["x"].value, "z": e["z"].value,
                                                "type": marker_type, "label": label})
    return (tile_im, markers)