def direct_clip(stacking, band_name, clip_extents, tile_id, rename, workdir):
    """Clip datatypes which require no special processing.

    Mosaics each scene's *band_name* GeoTIFF into a single tile-sized
    image with one gdalwarp call.

    Args:
        stacking (list): scene dicts with a 'LANDSAT_PRODUCT_ID' key;
            iterated in reverse because gdalwarp lets later inputs
            overwrite earlier ones, so the first stack entry wins
        band_name (str): suffix used to locate each scene's band file
        clip_extents (str): target extents passed to ``gdalwarp -te``
        tile_id (str): tile identifier; names the output subdir and file
        rename (str): band name used in the output filename
        workdir (str): base working directory

    Returns:
        str: path of the output mosaic (an error is logged if the warp
        failed to produce it)
    """
    logger.info(' Start processing for band: %s', band_name)
    mosaic_filename = os.path.join(workdir, tile_id,
                                   tile_id + '_' + rename + '.tif')
    # Idempotency: reuse a result left over from a previous run.
    if os.path.exists(mosaic_filename):
        logger.warning("Skip previously generated result %s",
                       mosaic_filename)
        return mosaic_filename

    warp_cmd = ('gdalwarp -te {extents}'
                ' -co "compress=deflate" -co "zlevel=9"'
                ' -co "tiled=yes" -co "predictor=2"').format(
                    extents=clip_extents)
    # Append inputs bottom-of-stack first so the preferred scene wins.
    for stack in reversed(stacking):
        scene_name = util.ffind(workdir, stack['LANDSAT_PRODUCT_ID'],
                                '*' + band_name + '.tif')
        warp_cmd += ' ' + scene_name
    warp_cmd += ' ' + mosaic_filename
    util.execute_cmd(warp_cmd)

    logger.info(' End processing for %s as %s ', band_name,
                mosaic_filename)
    if not os.path.exists(mosaic_filename):
        logger.error('Processing failed to generate desired output: %s',
                     mosaic_filename)
    return mosaic_filename
def push(cmd_args):
    """Push a local file or directory to the device.

    Flags: -l/--local-file, -r/--remote-path (default /data/local/tmp),
    -n/--remote-name, -m/--mod (chmod bits, default "777"),
    --no-bak (skip backing up an existing remote file).
    Positional fallback (no flags): local_file [remote_path [remote_name]].

    Returns:
        bool: True on success, False on bad options or a failed push.
    """
    # BUG FIX: short spec was "l:r:n:m" — "m" took no argument even
    # though the long form is "mod=". Now "m:" so -m accepts a value.
    opts, args = getopt.getopt(
        cmd_args, "l:r:n:m:",
        ["local-file=", "remote-path=", "remote-name=", "no-bak", "mod="])
    log.info("opts %s args:%s" % (opts, args))
    local_file, remote_path, remote_name = "", "", ""
    bak_file, chmod = True, "777"
    for op, value in opts:
        if op == "-l" or op == "--local-file":
            local_file = value
        elif op == "-r" or op == "--remote-path":
            remote_path = value
        elif op == "-n" or op == "--remote-name":
            remote_name = value
        # BUG FIX: this branch tested "-n" again, duplicating
        # --remote-name and making -m/--mod unreachable.
        elif op == "-m" or op == "--mod":
            chmod = value
        elif op == "--no-bak":
            bak_file = False
        else:
            log.error("unknown opt:%s value:%s" % (op, value))
            return False
    if len(opts) == 0:
        local_file = args[0] if len(args) >= 1 else ""
        remote_path = args[1] if len(args) >= 2 else ""
        remote_name = args[2] if len(args) >= 3 else ""
    if remote_path == "":
        remote_path = "/data/local/tmp"
    if os.path.isdir(local_file):
        # push a directory
        remote_file = remote_path + "/"
        util.mkdir(remote_file)
    elif os.path.isfile(os.path.join(os.getcwd(), local_file)):
        # push a single file
        # local_path = os.path.dirname(local_file)
        local_fname = os.path.basename(local_file)
        if remote_name == "":
            remote_name = local_fname
        remote_file = remote_path + "/" + remote_name
    else:
        log.error("local file:%s %s not exist"
                  % (local_file, os.path.join(os.getcwd(), local_file)))
        return False
    if bak_file:
        # Preserve any existing remote file as <name>.bak before pushing.
        shell_cmd = util.getshell('mv "%s" "%s.bak"'
                                  % (remote_file, remote_file))
        util.execute_cmd(shell_cmd)
    log.info("local:%s remote:%s" % (local_file, remote_file))
    shell_cmd = util.getcmd('push "%s" "%s"' % (local_file, remote_file))
    ret, res_str = util.execute_cmd_with_stdout(shell_cmd)
    if not ret:
        return False
    if chmod != "":
        shell_cmd = util.getshell('chmod %s "%s"' % (chmod, remote_file))
        if not util.execute_cmd(shell_cmd):
            return False
    # shell_cmd = util.getshell('".%s"' % remote_file)
    # return util.execute_cmd(shell_cmd)
    return True
def process_lineage_contributing(lineage_filename, n_contrib_scenes):
    """Check histogram for count of scenes which were not all-fill.

    Runs ``gdalinfo -hist`` on the lineage file, counts the scenes that
    actually contributed pixels, and when some contributed nothing,
    rewrites the lineage pixel values so they remain contiguous.

    Args:
        lineage_filename (str): path to the lineage GeoTIFF
        n_contrib_scenes (int): number of scenes expected to contribute

    Returns:
        int: number of scenes that contributed at least one pixel

    Raises:
        ArdTileNotNeededException: when the lineage is entirely fill.
    """
    logger.info(' Start checking contributing scenes')

    info_cmd = 'gdalinfo -hist {}'
    results = util.execute_cmd(info_cmd.format(lineage_filename))
    # TODO: could potentially use this instead...
    # gdalinfo -hist leaves a .aux.xml side-car behind; clean it up.
    util.remove(lineage_filename + '.aux.xml')
    count, array = geofuncs.parse_gdal_hist_output(results['output'])

    logger.info('Parsing histogram from lineage file found %d'
                ' contributing scenes', count)
    if count == 0:
        logger.warning('Found all fill lineage, tile not needed!')
        raise ArdTileNotNeededException()

    # decrement pixel values in lineage file if some scenes didn't
    # contribute any pixels
    if count != n_contrib_scenes:
        delta = n_contrib_scenes - count

        # Determine whether we need decrement the pixel
        # values in the lineage file or not.
        # NOTE(review): the branches assume at most 3 stacked scenes and
        # that array[i] is the histogram bucket for lineage value i+1 —
        # confirm against geofuncs.parse_gdal_hist_output.
        cmd = ''
        if delta == 1:
            if array[0] == 0:
                # First level absent: shift everything down by one.
                cmd = ' --calc="A-' + str(delta) + '"'
            elif array[1] == 0 and array[2] > 0:
                # Middle level absent: only remap value 3 down to 2.
                cmd = ' --calc="A-(A==3)"'
        elif delta == 2:
            if array[0] == 0 and array[1] == 0:
                cmd = ' --calc="A-' + str(delta) + '"'
            elif array[0] == 0 and array[2] == 0:
                cmd = ' --calc="A-' + str(1) + '"'

        if cmd != '':
            temp_name = lineage_filename.replace('.tif', '_linTemp.tif')
            calc_cmd = ('gdal_calc.py -A {lineage} --outfile {temp} {calc}'
                        ' --type="Byte" --NoDataValue=0 --overwrite')
            util.execute_cmd(calc_cmd.format(lineage=lineage_filename,
                                             temp=temp_name, calc=cmd))

            # compress
            warp_cmd = ('gdalwarp -co "compress=deflate" -co "zlevel=9"'
                        ' -co "tiled=yes" -co "predictor=2"'
                        ' -overwrite {} {}')
            util.execute_cmd(warp_cmd.format(temp_name, lineage_filename))
            util.remove(temp_name)

    logger.info('finish updating contributing scenes')
    return count
def find_executable(self):
    """Locate ``self.name`` on PATH and probe its ``--version`` output.

    Returns:
        installed_dependency describing the executable, or None when it
        is missing or no "X.Y" version number can be parsed.
    """
    eo, ee, ec = execute_cmd("which " + self.name)
    if eo and ec == 0:
        neo, ee, ec = execute_cmd(self.name + " --version")
        if neo and ec == 0:
            # BUG FIX: pattern was "\d+.\d+" — the unescaped dot matched
            # any character and the non-raw string relied on "\d" not
            # being a recognized escape. Use a raw, escaped pattern.
            matches = re.findall(r"\d+\.\d+", neo)
            if not matches:
                # Robustness: version output with no X.Y number used to
                # raise IndexError; treat it as "not found" instead.
                return None
            vers_str = matches[0]
            dep = dependency(
                self.name,
                version(vers_str.split(".")[0], vers_str.split(".")[1]),
                "exe", "none")
            installed_dep = installed_dependency(dep, False,
                                                 os.path.dirname(eo),
                                                 list())
            return installed_dep
    return None
def get_version(self, dep):
    """Return the version apt-cache reports for *dep*, or None."""
    stdout, _stderr, status = execute_cmd("apt-cache policy "
                                          + dep.package_name)
    if not stdout or status != 0:
        return None
    # Fifth whitespace token of the policy output holds the version.
    raw = self.strip_pre(stdout.split()[4])
    major, minor = raw.split(".")[0], raw.split(".")[1]
    return version(int(self.strip_suff(major)),
                   int(self.strip_suff(minor)))
def get_version(self, dep):
    """Return the version Homebrew reports for *dep*, or None."""
    stdout, _stderr, status = execute_cmd("brew info " + dep.package_name)
    if not stdout or status != 0:
        return None
    # Third whitespace token of `brew info` output holds the version.
    raw = stdout.split()[2]
    parts = raw.split(".")
    return version(int(self.strip_suff(parts[0])),
                   int(self.strip_suff(parts[1])))
def prefix(self, dep):
    """Return *dep*'s Homebrew Cellar path, or None when unknown."""
    cellar_pattern = re.compile("/usr/local/Cellar/.*")
    stdout, _stderr, status = execute_cmd("brew info " + dep.package_name)
    if not stdout or status != 0:
        return None
    # First token that looks like a Cellar path, if any.
    return next((tok for tok in stdout.split()
                 if cellar_pattern.match(tok)), None)
def calc_nodata_9999_lineage(stacking, band_name, clip_extents, tile_id,
                             rename, workdir):
    """Clip scenes which have data outside the lineage, apply -9999 fill.

    Per scene: clip to the tile extents with -9999 nodata, then set to
    -9999 every pixel whose lineage level differs from the scene's
    position in the stack, and finally mosaic the masked scenes into one
    deflate-compressed GeoTIFF.

    Args:
        stacking (list): scene dicts with a 'LANDSAT_PRODUCT_ID' key
        band_name (str): suffix used to locate each scene's band file
        clip_extents (str): extents passed to ``gdalwarp -te``
        tile_id (str): tile identifier (output subdir and filename stem)
        rename (str): band name used in the output filename
        workdir (str): base working directory

    Returns:
        str: path of the output mosaic (an error is logged if it was
        not produced)
    """
    logger.info(' Start processing for band: %s', band_name)
    mosaic_filename = os.path.join(workdir, tile_id,
                                   tile_id + '_' + rename + '.tif')
    # Idempotency: reuse a result left over from a previous run.
    if os.path.exists(mosaic_filename):
        logger.warning("Skip previously generated result %s",
                       mosaic_filename)
        return mosaic_filename

    temp_clipped_names = list()
    temp_masked_names = list()
    # Levels are 1-based; reversed so the top-of-stack scene comes last.
    for level, stack in reversed(list(enumerate(stacking, start=1))):
        scene_name = util.ffind(workdir, stack['LANDSAT_PRODUCT_ID'],
                                '*' + band_name + '.tif')
        # Step 1: clip this scene to the tile extents.
        temp_name1 = mosaic_filename.replace('.tif',
                                             '_temp%d' % level + '.tif')
        temp_warp_cmd = ('gdalwarp -te {extents}'
                         ' -dstnodata "-9999" -srcnodata "-9999" {0} {1}')
        util.execute_cmd(temp_warp_cmd.format(scene_name, temp_name1,
                                              extents=clip_extents))
        temp_clipped_names.append(temp_name1)
        # Step 2: keep pixels whose lineage value equals this level,
        # set everything else to -9999.
        lineg_name = util.ffind(workdir, tile_id, '*LINEAGEQA.tif')
        temp_name2 = mosaic_filename.replace('.tif',
                                             '_temp%dM' % level + '.tif')
        temp_calc_cmd = ('gdal_calc.py -A {0} -B {lineage} --outfile {1}'
                         ' --calc="(A*(B=={level}) + (-9999*(B!={level})))"'
                         ' --NoDataValue=-9999')
        util.execute_cmd(temp_calc_cmd.format(temp_name1, temp_name2,
                                              lineage=lineg_name,
                                              level=level))
        temp_masked_names.append(temp_name2)

    # Mosaic the masked scenes, then rewrite once more with compression.
    temp_name = mosaic_filename.replace('.tif', '_temp.tif')
    temp_warp_cmd = 'gdalwarp {} {}'.format(' '.join(temp_masked_names),
                                            temp_name)
    util.execute_cmd(temp_warp_cmd)
    util.remove(*temp_masked_names + temp_clipped_names)

    warp_cmd = (
        'gdalwarp -dstnodata "-9999" -srcnodata "-9999" -co "compress=deflate"'
        ' -co "zlevel=9" -co "tiled=yes" -co "predictor=2" {} {}')
    util.execute_cmd(warp_cmd.format(temp_name, mosaic_filename))
    util.remove(temp_name)

    logger.info(' End processing for %s as %s ', band_name,
                mosaic_filename)
    if not os.path.exists(mosaic_filename):
        logger.error('Processing failed to generate desired output: %s',
                     mosaic_filename)
    return mosaic_filename
def process_lineage(stacking, band_name, clip_extents, tile_id, rename,
                    workdir):
    """Create the lineage file.

    Builds a per-scene contribution raster (pixel value = stack level
    wherever the scene has data, using A > -101 as the data test) and
    mosaics them into a single Byte lineage GeoTIFF clipped to the tile
    extents.

    Args:
        stacking (list): scene dicts with a 'LANDSAT_PRODUCT_ID' key
        band_name (str): suffix used to locate each scene's band file
        clip_extents (str): extents passed to ``gdalwarp -te``
        tile_id (str): tile identifier (output subdir and filename stem)
        rename (str): band name used in the output filename
        workdir (str): base working directory

    Returns:
        str: path of the lineage file (an error is logged if it was not
        produced)
    """
    logger.info(' Start processing for band: %s', rename)
    lineage_filename = os.path.join(workdir, tile_id,
                                    tile_id + '_' + rename + '.tif')
    # Idempotency: reuse a result left over from a previous run.
    if os.path.exists(lineage_filename):
        logger.warning("Skip previously generated result %s",
                       lineage_filename)
        return lineage_filename

    temp_names = list()
    # Levels are 1-based; reversed so the top-of-stack scene comes last
    # and therefore wins in the gdalwarp mosaic below.
    for level, stack in reversed(list(enumerate(stacking, start=1))):
        temp_name = lineage_filename.replace('.tif',
                                             '_srcTemp%d' % level + '.tif')
        scene_name = util.ffind(workdir, stack['LANDSAT_PRODUCT_ID'],
                                '*' + band_name + '.tif')
        # Each pixel becomes the stack level where the scene has data.
        calc_cmd = (
            'gdal_calc.py -A {scene} --outfile {temp}'
            ' --calc=" {level} * (A > -101)" --type="Byte" --NoDataValue=0')
        util.execute_cmd(calc_cmd.format(level=level, temp=temp_name,
                                         scene=scene_name))
        temp_names.append(temp_name)

    warp_cmd = ('gdalwarp -te {extents} -dstnodata "0" -srcnodata "0"'
                ' -ot "Byte" -wt "Byte"'
                ' -co "compress=deflate" -co "zlevel=9"'
                ' -co "tiled=yes" -co "predictor=2" ').format(
                    extents=clip_extents)
    warp_cmd += ' '.join(temp_names)
    warp_cmd += ' ' + lineage_filename
    util.execute_cmd(warp_cmd)
    util.remove(*temp_names)

    logger.info(' End processing for %s as %s ', band_name,
                lineage_filename)
    if not os.path.exists(lineage_filename):
        logger.error('Processing failed to generate desired output: %s',
                     lineage_filename)
    return lineage_filename
def compile():
    """Run the module-level ``compile_cmd`` with a compiler-specific
    environment, removing ``clean_files`` artifacts afterwards.

    The process environment is modified by ``set_up_environment`` and
    restored before returning.

    Returns:
        result of ``util.execute_cmd`` for the command, or 0 when there
        is no ``compile_cmd`` configured.
    """
    if compile_cmd is None:  # BUG FIX: was "== None"
        return 0
    saved_env = dict(os.environ)
    set_up_environment(compiler)
    print(compile_cmd)
    try:
        ret = util.execute_cmd(compile_cmd)
        for artifact in clean_files:
            if os.path.exists(artifact):
                os.remove(artifact)
    finally:
        # BUG FIX: "os.environ = env" rebinds the module attribute to a
        # plain dict and does NOT restore the real process environment
        # (os.environ is a special mapping that syncs with the process).
        # Mutate it in place instead, and do so even if the build raised.
        os.environ.clear()
        os.environ.update(saved_env)
    return ret
def check_dependency(self, dep):
    """Classify *dep* against what Homebrew has installed.

    Returns a two-element list: [status, prefix-or-empty-string].
    """
    vers = self.get_version(dep)
    stdout, _stderr, status = execute_cmd("brew list --versions "
                                          + dep.package_name)
    ### installed
    if vers and stdout and status == 0:
        if vers.satisfies(dep.strict, dep.version):
            return [self.installed, self.prefix(dep)]
        return [self.installed_wrong, ""]
    # Known to brew but not installed: satisfiable or not.
    if vers and vers.satisfies(dep.strict, dep.version):
        return [self.not_installed, ""]
    return [self.not_satisfiable, ""]
def check_dependency(self, dep):
    """Classify *dep* against what apt has installed.

    Returns a two-element list: [status, prefix-or-empty-string].
    """
    eo, ee, ec = execute_cmd("apt-cache policy " + dep.package_name)
    vers = self.get_version(dep)
    if eo and ec == 0:
        ### not installed
        # apt-cache policy prints "Installed: (none)" for absent packages.
        if eo.split()[2] == "(none)":
            if vers and vers.satisfies(dep.strict, dep.version):
                return [self.not_installed, ""]
            else:
                return [self.not_satisfiable, ""]
        elif vers and vers.satisfies(dep.strict, dep.version):
            return [self.installed, self.prefix(dep)]
        else:
            # BUG FIX: returned the bare status instead of the
            # [status, prefix] pair every other branch (and the brew
            # implementation) returns.
            return [self.installed_wrong, ""]
    return [self.not_satisfiable, ""]
def uninject_internal(pid, abi, x86_arm):
    """Ask the on-device loader to uninject the client .so from *pid*."""
    base = Command.__remote_path + abi + "/"
    remote_loader = base + Command.__loader_name
    # In x86-on-arm mode the fake client module is the injected one.
    so_name = (Command.__client_fake_name if x86_arm
               else Command.__client_mod_name)
    remote_inject_so = base + so_name
    if not util.check_exist(remote_loader):
        log.error("check loader not exist")
        return False
    shell_cmd = util.getshell('"%s" uninject --pid=%s --so="%s"'
                              % (remote_loader, pid, remote_inject_so))
    if not util.execute_cmd(shell_cmd):
        return False
    return True
def upload_script(script):
    """Push a client script to the device.

    Resolves *script* first as the given path, then relative to the
    local tool directory. Returns the remote path on success, "" when
    the script is missing or the push fails.
    """
    # Resolve the script: as given, else under the local tool directory.
    local_script = ""
    if os.path.isfile(script):
        local_script = script
    elif os.path.isfile(os.path.join(Command.__tool_local_path, script)):
        local_script = os.path.join(Command.__tool_local_path, script)
    if "" == local_script:
        return ""
    log.info("update load script to %s" % Command.__remote_path)
    remote_script = Command.__remote_path + os.path.basename(local_script)
    shell_cmd = util.getcmd('push "%s" "%s"'
                            % (local_script, Command.__remote_path))
    if not util.execute_cmd(shell_cmd):
        return ""
    return remote_script
def inject_internal(pid, abi, init_script, need_push=False, x86_arm=False):
    """Inject the client .so into process *pid* via the remote loader."""
    base = Command.__remote_path + abi + "/"
    remote_loader = base + Command.__loader_name
    # In x86-on-arm mode the fake client module is injected instead.
    so_name = (Command.__client_fake_name if x86_arm
               else Command.__client_mod_name)
    remote_inject_so = base + so_name
    # Upload the init script (also validates that it exists locally).
    remote_script = Command.upload_script(init_script)
    # Upload the tool binaries when absent on the device or forced.
    if not util.check_exist(remote_loader) or need_push:
        Command.upload_tools(abi, x86_arm)
    script_arg = '"%s"' % remote_script if "" != remote_script else ""
    shell_cmd = util.getshell(
        '"%s" inject --pid=%s --so="%s" --script=%s ' % (
            remote_loader, pid, remote_inject_so, script_arg))
    if not util.execute_cmd(shell_cmd):
        return False
    return True
def dolua(cmd_args):
    """Call a Lua function inside a remote process.

    Flags: -p/--process, -s/--script, -f/--func, --abi (default "x86"),
    --x86-arm, --update. Positional fallback (no flags):
    process script func [abi].

    Returns:
        bool: True on success.
    """
    opts, args = getopt.getopt(
        cmd_args, "p:s:f:",
        ["process=", "script=", "func=", "abi=", "x86-arm", "update"])
    log.info("opts %s args:%s" % (opts, args))
    process_name, abi, lua_script, func_name = "", "x86", "", ""
    need_update, x86_arm, zygote = False, False, False
    for op, value in opts:
        if op == "-s" or op == "--script":
            lua_script = value
        elif op == "-f" or op == "--func":
            func_name = value
        elif op == "-p" or op == "--process":
            process_name = value
        elif op == "--abi":
            abi = value
        elif op == "--x86-arm":
            x86_arm = True
        elif op == "--update":
            need_update = True
        else:
            log.error("unknown opt:%s value:%s" % (op, value))
            return False
    if len(opts) == 0:
        process_name = args[0] if len(args) >= 1 else ""
        lua_script = args[1] if len(args) >= 2 else ""
        func_name = args[2] if len(args) >= 3 else ""
        # BUG FIX: falling back to "" here discarded the "x86" default
        # whenever fewer than four positional arguments were given.
        abi = args[3] if len(args) >= 4 else abi
    ret, process_id, remote_script, remote_loader, remote_inject_so = \
        Command.lua_check(process_name, lua_script, zygote, abi, x86_arm,
                          need_update)
    if not ret:
        return False
    shell_cmd = '"%s" luacall --pid="%s" --so="%s" --script="%s" --func="%s" ' % \
        (remote_loader, process_id, remote_inject_so, remote_script,
         func_name)
    shell_cmd = util.getshell(shell_cmd)
    if not util.execute_cmd(shell_cmd):
        return False
    return True
def upload_tools(abi, x86_arm):
    """Push the loader and client modules for *abi* to the device.

    When *x86_arm* is set, pushes the fake client for *abi* and the real
    client module under an additional armeabi-v7a directory; otherwise
    pushes the real client alongside the loader. Every pushed directory
    is chmod'ed to 777.

    Returns:
        bool: True when every push/mkdir/chmod succeeded.
    """
    remote_path = Command.__remote_path + abi + "/"
    if not util.check_dir(remote_path):
        util.mkdir(remote_path)
    # Upload the loader
    local_loader = os.path.join(Command.__tool_local_path, abi,
                                Command.__loader_name)
    shell_cmd = util.getcmd('push "%s" "%s"' % (local_loader, remote_path))
    if not util.execute_cmd(shell_cmd):
        return False
    if x86_arm:
        # Upload the fake client (loader.so) for this abi
        local_inject_so = os.path.join(Command.__tool_local_path, abi,
                                       Command.__client_fake_name)
        shell_cmd = util.getcmd('push "%s" "%s"'
                                % (local_inject_so, remote_path))
        if not util.execute_cmd(shell_cmd):
            return False
        shell_cmd = util.getshell('chmod 777 "%s"/*' % remote_path)
        if not util.execute_cmd(shell_cmd):
            return False
        # Create the armeabi-v7a directory
        remote_path = Command.__remote_path + "armeabi-v7a" + "/"
        if not util.check_dir(remote_path):
            util.mkdir(remote_path)
        # Upload the client module under armeabi-v7a
        local_client = os.path.join(Command.__tool_local_path,
                                    "armeabi-v7a",
                                    Command.__client_mod_name)
        shell_cmd = util.getcmd('push "%s" "%s"'
                                % (local_client, remote_path))
        if not util.execute_cmd(shell_cmd):
            return False
    else:
        # Upload the client module for this abi
        local_client = os.path.join(Command.__tool_local_path, abi,
                                    Command.__client_mod_name)
        shell_cmd = util.getcmd('push "%s" "%s"'
                                % (local_client, remote_path))
        if not util.execute_cmd(shell_cmd):
            return False
    # chmod whichever directory we last pushed into (remote_path was
    # reassigned to the armeabi-v7a dir in the x86_arm branch above).
    shell_cmd = util.getshell('chmod 777 "%s"/*' % remote_path)
    if not util.execute_cmd(shell_cmd):
        return False
    return True
def process_browse(bands, workdir, tile_id, outpath):
    """Create a pyramid-layered RGB browse file for EE.

    Merges the red/green/blue band rasters into one RGB image, scales
    it to Byte, compresses it as JPEG/YCbCr, adds internal overviews,
    then copies the result to *outpath* and verifies the copy with MD5
    checksums. The flaky gdal steps are retried once after a delay.

    Args:
        bands (dict): keys 'red', 'green', 'blue' mapping to band names
        workdir (str): working directory root
        tile_id (str): tile identifier
        outpath (str): destination directory for the final browse

    Returns:
        0 on success, a nonzero gdal status or 1 (checksum mismatch) on
        failure; the skip path returns the existing output filename.
    """
    logger.info(' Start processing for BROWSE')

    output_browse_filename = os.path.join(outpath, tile_id + '.tif')
    # Idempotency: reuse a result left over from a previous run.
    if os.path.exists(output_browse_filename):
        logger.warning("Skip previously generated result %s",
                       output_browse_filename)
        return output_browse_filename

    # Resolve band names to the actual per-tile GeoTIFF paths.
    bands = {
        k: util.ffind(workdir, tile_id, tile_id + '_' + v + '.tif')
        for k, v in bands.items()
    }

    # create RGB image
    temp_filename1 = os.path.join(workdir, tile_id + '_brw1.tif')
    merge_cmd = 'gdal_merge.py -o {outfile} -separate {red} {green} {blue}'
    results = util.execute_cmd(
        merge_cmd.format(outfile=temp_filename1, **bands))
    if results['status'] != 0:
        return results['status']

    # scale the pixel values
    temp_filename2 = os.path.join(workdir, tile_id + '_brw2.tif')
    scale_cmd = 'gdal_translate -scale 0 10000 -ot Byte {} {}'
    results = util.execute_cmd(scale_cmd.format(temp_filename1,
                                                temp_filename2))
    if results['status'] != 0:
        return results['status']

    # apply compression
    browse_filename = os.path.join(workdir, tile_id + '.tif')
    comp_cmd = 'gdal_translate -co COMPRESS=JPEG -co PHOTOMETRIC=YCBCR {} {}'
    results = util.execute_cmd(comp_cmd.format(temp_filename2,
                                               browse_filename))
    if results['status'] != 0:
        # The browse generation failed on the HSM.
        # Wait a short period and try again.
        logger.warning('gdal_translate failed to create the browse. '
                       'Trying again.')
        time.sleep(10)
        results = util.execute_cmd(
            comp_cmd.format(temp_filename2, browse_filename))
        if results['status'] != 0:
            return results['status']

    # internal pyramids
    addo_cmd = 'gdaladdo {} 2 4 8 16'
    results = util.execute_cmd(addo_cmd.format(browse_filename))
    if results['status'] != 0:
        # The pyramid generation failed on the HSM.
        # Wait a short period and try again.
        logger.warning('gdaladdo failed to create the pyramids. '
                       'Trying again.')
        time.sleep(10)
        results = util.execute_cmd(addo_cmd.format(browse_filename))
        if results['status'] != 0:
            return results['status']

    # Copy the browse to the output location, and verify using checksums.
    shutil.copyfile(browse_filename, output_browse_filename)
    if (util.checksum_md5(browse_filename) !=
            util.checksum_md5(output_browse_filename)):
        logger.warning('%s checksums do not match.',
                       os.path.basename(browse_filename))
        os.remove(output_browse_filename)
        return 1
    else:
        logger.info('%s checksums match.',
                    os.path.basename(browse_filename))

    util.remove(temp_filename1, temp_filename2,
                browse_filename + '.aux.xml', browse_filename)

    logger.info(' End building browse.')
    return 0
def install_dependency(self, dep):
    """Install *dep* with Homebrew; truthy when brew exits cleanly."""
    log.log("Installing " + dep.name + " " + dep.package_name)
    stdout, _stderr, status = execute_cmd("brew install "
                                          + dep.package_name)
    return stdout and status == 0
def remove_dependency(self, dep):
    """Remove *dep* with Homebrew; truthy when brew exits cleanly."""
    log.log("Removing " + dep.name + " " + dep.package_name)
    stdout, _stderr, status = execute_cmd("brew remove "
                                          + dep.package_name)
    return stdout and status == 0
def shell(args):
    """Wrap *args* in a device shell command, log it, and execute it."""
    cmd = util.getshell(args)
    log.info(cmd)
    return util.execute_cmd(cmd)
def run():
    """Echo the module-level ``run_cmd`` and execute it.

    Returns:
        result of ``util.execute_cmd`` for ``run_cmd``.
    """
    print(run_cmd)
    return util.execute_cmd(run_cmd)
def test_execute_cmd_success(self):
    """A valid command exits 0 and yields a parseable line count."""
    return_code, stdout = execute_cmd("wc -l /etc/passwd")
    # stdout looks like "29 /etc/passwd\n"
    self.assertEqual(return_code, 0)
    self.assertGreaterEqual(int(stdout.split()[0]), 1)
def remove_dependency(self, dep):
    """apt-get remove *dep*; warns when not running as root."""
    log.log("Removing " + dep.name + " " + dep.package_name)
    if os.getuid() != 0:
        err.log("Removing via apt-get requires sudo!")
    _stdout, _stderr, status = execute_cmd("apt-get --yes remove "
                                           + dep.package_name)
    return status == 0
def install_dependency(self, dep):
    """apt-get install *dep*; warns when not running as root."""
    log.log("Installing " + dep.name + " " + dep.package_name)
    if os.getuid() != 0:
        err.log("Installing via apt-get requires sudo!")
    _stdout, _stderr, status = execute_cmd("apt-get --yes install "
                                           + dep.package_name)
    return status == 0
def prefix(self, dep):
    """Guess *dep*'s install prefix from its dpkg file list, or None."""
    stdout, _stderr, status = execute_cmd("dpkg -L " + dep.package_name)
    if not stdout or status != 0:
        return None
    # Common root of all installed paths (first token is skipped).
    common = os.path.commonprefix(stdout.split()[1:])
    return os.path.join(common, "lib", dep.package_name)
def add_repo(self, repo):
    """Register the PPA *repo* via add-apt-repository; True on success."""
    stdout, _stderr, status = execute_cmd("add-apt-repository ppa:"
                                          + repo)
    return bool(stdout) and status == 0
def test_execute_cmd_fail(self):
    """A command on a missing file reports a nonzero exit code."""
    return_code, _stdout = execute_cmd("wc -l /etc/passwd_not_exists")
    self.assertNotEqual(return_code, 0)