Example 1
    def build_ace_program(self,ace):
        print( "-- Building program with ACE" )
        source_modules=[]
        source_objects=[]
        test_modules=[]
        test_objects=[]
        test_methods=[]
        for root, dirs, files in os.walk("src/main"):
            for file in files:
                if file.endswith(".cpp") or file.endswith(".cxx") :
                    source_modules.append(os.path.join(root,file))
        for root, dirs, files in os.walk("src/test"):
            for file in files:
                if file.endswith(".cpp") or file.endswith(".cxx") or file.endswith(".C"):
                    test_modules.append(os.path.join(root,file))
        # pprint({"source_modules":source_modules,"test_modules":test_modules})
        source_objects=[];
        for file in source_modules:
            source_object = self.compile_module(ace,file)
            source_objects.append(source_object)
        self.link(ace,source_objects)

        # Build test modules...
        for file in test_modules:
            test_objects.append(self.compile_module(ace,file))
        for object in test_objects:
            self.scan_object_for_tests(object,test_methods);

        nomain_source_objects = filter(self.nomain,source_objects)

        test_objects.append(self.generate_test_harness(ace,test_methods));
        self.link_test_harness(ace,nomain_source_objects,test_objects)
        run_cmd(["./.test_harness.exe"], echoErr=False);
Example 2
def run_matlab_script(original_script_name, list_of_lines_to_add, mulitcore_matlab=False, queue_run=True, debug=False):
    ############
    # create temp matlab script file
    fh_temp = tempfile.NamedTemporaryFile("w", prefix=os.path.abspath("./")+"/tmp_", suffix=".m", delete=False)
    fh_temp.write("\n".join(list_of_lines_to_add)+"\n")
    try: fh_temp.write("".join([x for x in open(original_script_name).readlines()]))
    except: pass
    temp_name = fh_temp.name
    fh_temp.close()
    if debug:
        print "\n\n\n\n\n", temp_name, "\n\n\n\n\n"
        sys.exit(1)
    ############
    # prepare run_cmd
    fgu217_queue = 'nice -19 qrsh -cwd -q fgu217.q -v BASH_ENV="~/.bashrc" -now n'
    matlab_run_cmd = '/net/isi-software/tools/matlab2011b/bin/matlab -nosplash -nodisplay -nojvm'
    single_core = '-singleCompThread'
    curr_script_to_run = '-r "%s; exit"' % (os.path.split(fh_temp.name)[1][:-2])
    curr_run_str_list = []
    if queue_run: curr_run_str_list.append(fgu217_queue) 
    curr_run_str_list.append(matlab_run_cmd)
    if not mulitcore_matlab: curr_run_str_list.append(single_core)
    curr_run_str_list.append(curr_script_to_run)
    
    curr_run_cmd = " ".join(curr_run_str_list)
    ############
    # run the script
    #print curr_run_cmd
    #sys.exit(1)
    run_cmd(curr_run_cmd, "running matlab scripts")
    ############
    # cleanup
    os.unlink(temp_name)
Example 3
def rank_population(exp_name, generation, population):
    print "\n*** Rank population:"
    print "\t*** experiment = %s" % exp_name
    print "\t*** generation = %d" % generation
    print "\t*** population = %d" % population

    # calculate cost function for each model
    for i in range(0, population):
        rmse_cmd = "cd %s/%04d/%04d ; ../../../rmse ../../../vel_ref.nc ocean_his.nc" % (
            exp_name, generation, i)
        run_cmd(rmse_cmd)

    # read in each rmse file
    rmse = []
    for i in range(0, population):
        rmse_filename = "%s/%04d/%04d/rmse.txt" % (exp_name, generation, i)
        rmse_file = open(rmse_filename, 'r')
        for line in rmse_file:  # iterate over each line
            trash, rms = line.split()  # split by whitespace
            rmse.append([i, float(rms)])  # convert from string to float

    # sort ascending by rmse value
    rmse.sort(key=lambda x: x[1])

    # and write it out to file in the current generation directory
    rmse_file_name = "%s/%04d/all_rmse.txt" % (exp_name, generation)
    rmse_file = open(rmse_file_name, 'w')
    for i in range(0, population):
        print rmse[i]
        rmse_file.write("%d %f\n" % (rmse[i][0], rmse[i][1]))
    rmse_file.close()

    print "*** done rank_population"
    return
Example 4
    def compile_module(self,ace,path):
        try:
            ace_dir=os.path.dirname(os.path.realpath(__file__))
            target_file = replace_extension(path,".o")
            if not self.module_needs_compile(ace,path) :
                # print( "-- Skipping: " + path )
                return target_file
            print( "-- Compiling: " + path )
            compiler_args=["g++"];
            compiler_args.extend(self.gpp['options'])
            compiler_args.append("-MD") # generate .d file
            compiler_args.append("-c") # compile, too!
            compiler_args.append("-g3") # include debug symbols.
            if self.args.coverage:
                compiler_args.append("--coverage") # code coverage
                compiler_args.append("-O0")
            else:
                compiler_args.append("-O3")

            compiler_args.append("-Werror=return-type")
            compiler_args.append("-I{}".format(os.path.join(ace_dir,"include")))
            compiler_args.append("-o")
            compiler_args.append(target_file)
            for include_path in ace['include_dirs']:
                compiler_args.append("-I%s" %include_path);
            compiler_args.append(path)
            run_cmd(compiler_args)
            ace['need_link'] = True
            return target_file
        except Exception as e:
            print(f"-- Failed to build {path}")
            print(e)
            raise e
Example 5
def saveToHive(df,table,db,spark,hivemode="new",partition=[]):

		""" staticmethod utilizado para cargar tablas a hive

			df: DataFrame a cargar
			table: Nombre de la tabla
			db: Nombre de la base de datos (si no existe, se crea)
			spark: Variable SparkSession
			hivemode: Por defecto new. La opcion es append para juntar varias tablas
			partition: """

		spark.sql("create database if not exists "+db)

		if hivemode=='new':
			df.write.format("parquet").partitionBy(partition).saveAsTable(db+"."+table)
		
		elif hivemode=='overwrite':
			run_cmd(["impala-shell","-q","drop table "+db+"."+table])
			df.write.format("parquet").partitionBy(partition).saveAsTable(db+"."+table)
		
		else:
			df.write.format("parquet").mode(hivemode).partitionBy(partition).saveAsTable(db+"."+table)
    		
		query="INVALIDATE METADATA "+db+"."+table
		run_cmd(["impala-shell","-q",query])
Example 6
def get_cache(cmd = '/sys/devices/system/cpu/'):
    output = run_cmd.run_cmd("ls " + cmd).split('\n')
    path = []
    for item in output:
        if item.startswith('cpu'):
            path.append('/'.join([cmd, item, 'cache/']))
    index_path = []
    for i in path:
        if os.path.exists(i):
            output = run_cmd.run_cmd("ls " + i).split('\n')
            for j in output:
                index_path.append('/'.join([i, j]))
    cat_cmd = []
    for i in index_path:
        path = '/'.join([i, 'size'])
        if os.path.exists(path):
            cat_cmd.append("cat " + path)
    output = []
    for i in cat_cmd:
        output.append(i + "," + run_cmd.run_cmd(i).strip())

    trim_output = []
    for i in output:
        trim_output.append(i.replace(cmd, '').replace('cache/', '').replace('cat ', ''))
    return trim_output
Example 7
def set_brightness(new_level, time):
    output = opts.XRANDR_OUTPUT

    # calibrating to xbacklight 1 - 100 by dividing by 100
    new_level /= 100.0

    new_level = min(new_level, 1.0)
    new_level = max(new_level, 0.3)

    run_cmd("xrandr --output %s --brightness %s" % (output, new_level))
Example 8
    def build_ace_library(self,ace):
        print( "-- Building library with ACE" )
        source_modules=[]
        source_objects=[]
        test_modules=[]
        test_objects=[]
        test_methods=[]
        for root, dirs, files in os.walk("src/main"):
            for file in files:
                if file.endswith(".cpp") or file.endswith(".cxx") or file.endswith(".C"):
                    source_modules.append(os.path.join(root,file))
        for root, dirs, files in os.walk("src/test"):
            for file in files:
                if file.endswith(".cpp") or file.endswith(".cxx") or file.endswith(".C"):
                    test_modules.append(os.path.join(root,file))
        # pprint({"source_modules":source_modules,"test_modules":test_modules})

        # Build source modules...
        source_objects=[];
        #for file in sorted(source_modules):
        #    source_objects.append(self.compile_module(ace,file))
        source_objects = Parallel(n_jobs=4)(
            delayed(compileAModule)(self,ace,fileName)
            for fileName in source_modules)

        if len(source_objects) or not os.path.exists("%s.a" %ace['target']):
            ace['need_link'] = True

        archive = None
        if ace['need_link'] :
            archive = self.archive(ace,source_objects)

        # Build test modules...
        for file in test_modules:
            test_objects.append(self.compile_module(ace,file))
        for object in test_objects:
            self.scan_object_for_tests(object,test_methods);

        test_objects.append(self.generate_test_harness(ace,test_methods));
        self.link_test_harness(ace,source_objects,test_objects)
        run_cmd(["./.test_harness.exe"]);


        # install the library in ~/.ace/
        path = "~/.ace/%s" %ace['name']
        path = os.path.expanduser(path)
        print( "Installing library to %s" %path )
        if os.path.isdir(path):
            shutil.rmtree(path)
        os.makedirs(path)
        shutil.copytree("include" , "%s/include" %(path))
        if archive is not None:
            shutil.copyfile("%s" %archive, "%s/%s" %(path,archive))
        json.dump(ace,open("%s/ace.json" %path,"w"))
        run_cmd(["find", path, "-type" , "f"])
Example 9
def evolve(exp_name, generation, population):
    print "\n***** evol:"
    print "\t***** experiment = %s" % exp_name
    print "\t***** generation = %d" % generation
    print "\t***** population = %d" % population

    evol_cmd = "./evol %s %d %d" % (exp_name, generation, population)
    run_cmd(evol_cmd)

    print "*** done evol"
    return
Example 10
    def generateCoverageSite(self):
        print("Processing Coverage Data")

        print("- Building trace file")
        run_cmd(["lcov", "-c", "-d" , ".", "-o", ".test_harness.coverage"], echo=False)
        print("- Removing library traces")
        run_cmd(["lcov", "-r", ".test_harness.coverage", "/usr/*", "-o", ".test_harness.coverage"], echo=False)
        run_cmd(["lcov", "-r", ".test_harness.coverage", "/Applications/*", "-o", ".test_harness.coverage"], echo=False)
        print("- Removing test code traces")
        run_cmd(["lcov", "-r", ".test_harness.coverage", "*/src/test/*", "-o", ".test_harness.coverage"], echo=False)
        print("- Generating coverage site")
        run_cmd(["genhtml", "-o", ".coverage", ".test_harness.coverage"], echo=False)
Example 11
 def archive(self,ace,objects):
     target="%s.a" %ace['target']
     if os.path.exists(target):
         os.remove(target)
     linker_args=["ar"];
     #linker_args.append("--no_warning_for_no_symbols")
     linker_args.append("-rcs")
     linker_args.append(target)
     for object in objects:
         linker_args.append(object)
     run_cmd(linker_args)
     return target
Example 12
 def archive(self,ace,objects):
     target="%s.a" %ace['target']
     if os.path.exists(target):
         os.remove(target)
     linker_args=["ar"];
     #linker_args.append("--no_warning_for_no_symbols")
     linker_args.append("-rcs")
     linker_args.append(target)
     for object in objects:
         linker_args.append(object)
     run_cmd(linker_args)
     return target
Example 13
def init_population(exp_name, generation, population):
    print "Initialize population:"
    print "\texperiment = %s" % exp_name
    print "\tgeneration = %d (should be zero!)" % generation
    print "\tpopulation = %d\n" % population

    # create population size random bottom drag scenarios
    min_drag = 0.0001
    max_drag = 0.1
    gen_drag_cmd = "./init_drag %f %f %s %d %d" % (
        min_drag, max_drag, exp_name, generation, population)
    run_cmd(gen_drag_cmd)
    return
Example 14
    def link_test_harness(self,ace,source_objects,test_objects):
        ace_dir=os.path.dirname(os.path.realpath(__file__))
        linker_args=["g++"]
        if self.args.coverage:
            coverageArgs = self.gpp.get("linker-coverage-args",[])
            print(f"Linking with coverage:{coverageArgs}")
            linker_args.extend(coverageArgs)
        linker_args.extend(["-g3",
                            "-rdynamic",
                            "-o",".test_harness.exe"]) #,".test_harness.o"])
        linker_args.extend(source_objects)
        linker_args.extend(test_objects)
        needRunTest = True
        for x in source_objects:
            if self.hasAceRunTest(x):
                print("Using {} for ace::run_test".format(x))
                needRunTest = False
        for x in test_objects:
            if self.hasAceRunTest(x):
                print("Using {} for ace::run_test".format(x))
                needRunTest = False

        dependency_flags = set()
        if 'dependencies' in ace :
            for dependency in ace['expandedDependencies'] :
                dependency_ace = json.load(open(os.path.expanduser("~/.ace/%s/ace.json" %dependency['name'])))
                if ('header-only' in dependency_ace) and dependency_ace['header-only']:
                    continue;
                if 'dependency-flags' in dependency_ace:
                    dependency_flags.update(dependency_ace['dependency-flags'])
                ## Grab any linker options that have to be applied before including a library.
                linker_args.extend(self.gpp['library-options'])
                libraryFile = os.path.expanduser("~/.ace/%s/%s.a" %(dependency['name'],dependency_ace['target']))
                if self.hasAceRunTest(libraryFile):
                    print("Using library {} for ace::run_test".format(libraryFile))
                    needRunTest = False

                linker_args.append(libraryFile);
        if 'lflags' in ace:
            linker_args.extend(ace['lflags']);
        if 'dependency-flags' in ace:
            dependency_flags.update(ace['dependency-flags']);

        if needRunTest:
            shutil.copy(os.path.join(ace_dir,"cpp_test_run.cpp"),".test_run.cpp")
            self.compile_module(ace,".test_run.cpp")
            linker_args.append(".test_run.o")

        linker_args.extend(dependency_flags)
        linker_args.extend(self.gpp['linker-final-options'])
        run_cmd(linker_args,echo=True)
Example 15
    def execute(self):
        logfile_path = os.path.join(self.temp_folder, "logfile.txt")
        completed_file_path = os.path.join(self.temp_folder, "completed")
        errors_file_path = os.path.join(self.temp_folder, "errors")

        script_path = os.path.join(self.path, 'Payload/script.sh')

        command_str = "sh {script_path} {temp_folder} {compiler_name} {code_file_path}".format(
            script_path=script_path,
            temp_folder=self.temp_folder,
            compiler_name=self.compiler_name,
            code_file_path=self.code_file_path)

        from run_cmd import run_cmd
        rc, response = run_cmd(command_str)

        if rc != 0:
            raise Exception("script.sh exited with non-zero status %d: %s" % (rc, response))

        f = open(completed_file_path, 'r')
        completed_data = f.read()
        f.close()

        data = completed_data.split('*-COMPILEBOX::ENDOFOUTPUT-*')[0]
        exec_time = completed_data.split('*-COMPILEBOX::ENDOFOUTPUT-*')[1]

        if not os.path.exists(errors_file_path):
            errors_data = ""
        else:
            f = open(errors_file_path, 'r')
            errors_data = f.read()
            f.close()

        rmtree(self.temp_folder)
        return data, exec_time, errors_data
Example 16
    def link_test_harness(self,ace,source_objects,test_objects):
        ace_dir=os.path.dirname(os.path.realpath(__file__))
        linker_args=["g++"]
        linker_args.extend(["-g3",
                            "-rdynamic",
                            "-o",".test_harness.exe"]) #,".test_harness.o"])
        linker_args.extend(source_objects)
        linker_args.extend(test_objects)
        needRunTest = True
        for x in source_objects:
            if self.hasAceRunTest(x):
                print("Using {} for ace::run_test".format(x))
                needRunTest = False
        for x in test_objects:
            if self.hasAceRunTest(x):
                print("Using {} for ace::run_test".format(x))
                needRunTest = False

        dependency_flags = set()
        if 'dependencies' in ace :
            for dependency in ace['expandedDependencies'] :
                dependency_ace = json.load(open(os.path.expanduser("~/.ace/%s/ace.json" %dependency['name'])))
                if ('header-only' in dependency_ace) and dependency_ace['header-only']:
                    continue;
                if 'dependency-flags' in dependency_ace:
                    dependency_flags.update(dependency_ace['dependency-flags'])
                ## Grab any linker options that have to be applied before including a library.
                linker_args.extend(self.gpp['library-options'])
                libraryFile = os.path.expanduser("~/.ace/%s/%s.a" %(dependency['name'],dependency_ace['target']))
                if self.hasAceRunTest(libraryFile):
                    print("Using library {} for ace::run_test".format(libraryFile))
                    needRunTest = False

                linker_args.append(libraryFile);
        if 'lflags' in ace:
            linker_args.extend(ace['lflags']);
        if 'dependency-flags' in ace:
            dependency_flags.update(ace['dependency-flags']);

        if needRunTest:
            shutil.copy(os.path.join(ace_dir,"cpp_test_run.cpp"),".test_run.cpp")
            self.compile_module(ace,".test_run.cpp")
            linker_args.append(".test_run.o")

        linker_args.extend(dependency_flags)
        linker_args.extend(self.gpp['linker-final-options'])
        run_cmd(linker_args,echo=True)
Example 17
def move_to_flume():
    for filename in os.listdir(DIRNAME):
        (ret, out, err) = run_cmd([
            'hdfs', 'dfs', '-copyFromLocal', '-f',
            os.path.abspath(os.path.join(DIRNAME, filename)),
            os.path.join(HDFS_DIR, filename)
        ])
        print(ret, out, err)
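
The HDFS examples expect run_cmd to take an argument list and return a (return code, stdout, stderr) tuple. A minimal sketch under that assumption:

import subprocess

def run_cmd(args):
    # Run an argument list and return (exit_code, stdout, stderr).
    proc = subprocess.Popen(args, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    out, err = proc.communicate()
    return proc.returncode, out, err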
Example 18
def get_brightness():
    out = run_cmd("xrandr --current --verbose")
    for line in out.split("\n"):
        if line.find("Brightness") != -1:
            brt = line.split(":")[1]
            brt = float(brt)
    # calibrating to xbacklight 1 - 100 by multiplying by 100
    return brt * 100
Example 19
def send_to_hdfs():
    for filename in os.listdir(OUTPUT_DIR):
        (ret, out, err) = run_cmd([
            'hdfs', 'dfs', '-put', '-f',
            os.path.abspath(os.path.join(OUTPUT_DIR, filename)),
            os.path.join(IMPALA_DIR, mapper_out[filename][2])
        ])
        print(ret, out, err)
Example 20
def get_brightness():
    if not can_use():
        ret = float(run_cmd("xbacklight -get"))
        return ret
    else:
        ctrls = get_controllers()
        ctrl = Controller(ctrls[0])
        ret = float(ctrl.brightness())
        return ret
Example 21
def get_window():
  focused_cmd = opts.CHECK_TITLE_CMD
  if opts.CHECK_PID:
    focused_cmd = opts.CHECK_PID_CMD

  try: window = run_cmd(focused_cmd).strip()
  except Exception, e: print e; window = None

  return window
Example 22
 def detect_gpp(self) :
     gpp_version = run_cmd(["g++", "--version"], echo=False)
     gpp_version_string = gpp_version.stdout[0]
     for key,value in self.config['g++-version-map'].iteritems():
         if key==gpp_version_string:
             return self.config['g++-versions'][value]
     gpp_default = self.config['g++-version-map']['default']
     print("WARN: unrecognized g++ version \"%s\". Using default \"%s\" instead" %(gpp_version_string, gpp_default))
     return self.config['g++-versions'][gpp_default]
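
detect_gpp looks up the first line of `g++ --version` in the loaded configuration. The keys used below ('g++-version-map', 'g++-versions', 'default', and the option lists referenced elsewhere in these examples) come from the code; the version string and option values are purely illustrative assumptions.

# Illustrative config shape only; real version strings and options will differ.
config = {
    "g++-version-map": {
        "g++ (Ubuntu 9.4.0-1ubuntu1~20.04.2) 9.4.0": "gcc9",   # hypothetical mapping
        "default": "gcc9",
    },
    "g++-versions": {
        "gcc9": {
            "options": ["-std=c++17", "-Wall"],
            "library-options": [],
            "linker-final-options": ["-lpthread"],
        },
    },
}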
Example 23
def get_brightness():
    if not can_use():
      ret = float(run_cmd("xbacklight -get"))
      return ret
    else:
      ctrls = get_controllers()
      ctrl = Controller(ctrls[0])
      ret = float(ctrl.brightness())
      return ret
Example 24
    def build_ace_container(self,ace):
        print( "-- Building as ace container" )
        if self.args['clone-missing'] and not ace == None and 'modules' in ace:
            print( "-- checking for missing repos" )
            for module in ace['modules']:
                if not os.path.isdir(module):
                    moduleDefinition = ace['modules'][module];
                    print( "--- Need to clone module: %s"  %module )
                    if "git-remotes" in  moduleDefinition:
                        if not "origin" in moduleDefinition["git-remotes"]:
                            raise Exception("Module \"" + module + "\" has git-remotes info, but no origin. Could not clone it.")
                        run_cmd(["git","clone",moduleDefinition["git-remotes"]["origin"],module]);
                    else:
                        raise Exception("Module \"" + module + "\" has no repository info and is missing. Could not clone it.")

        for item in os.listdir("."):
            if not item.startswith('.') :
                if os.path.isdir(item) :
                    self.descend(item)
Example 25
    def build_ace_container(self,ace):
        print( "-- Building as ace container" )
        if self.args.clone_missing and not ace == None and 'modules' in ace:
            print( "-- checking for missing repos" )
            for module in ace['modules']:
                if not os.path.isdir(module):
                    moduleDefinition = ace['modules'][module];
                    print( "--- Need to clone module: %s"  %module )
                    if "git-remotes" in  moduleDefinition:
                        if not "origin" in moduleDefinition["git-remotes"]:
                            raise Exception("Module \"" + module + "\" has git-remotes info, but no origin. Could not clone it.")
                        run_cmd(["git","clone",moduleDefinition["git-remotes"]["origin"],module]);
                    else:
                        raise Exception("Module \"" + module + "\" has no repository info and is missing. Could not clone it.")

        print( "-- Scanning directory" )
        for item in os.listdir("."):
            if not item.startswith('.') :
                if os.path.isdir(item) :
                    self.descend(item)
Example 26
 def detect_gpp(self) :
     gpp_version = run_cmd(["g++", "--version"], echo=True, echoErr=False)
     gpp_version_string = gpp_version.stdout[0]
     print(f"detecting g++: {gpp_version_string}")
     print(f"config: {self.config['g++-version-map']}")
     for key,value in self.config['g++-version-map'].items():
         if key==gpp_version_string:
             print("Found g++ version \"%s\"" %(gpp_version_string))
             return self.config['g++-versions'][value]
     gpp_default = self.config['g++-version-map']['default']
     print("WARN: unrecognized g++ version \"%s\". Using default \"%s\" instead" %(gpp_version_string, gpp_default))
     return self.config['g++-versions'][gpp_default]
Example 27
def get_cpuinfo(cmd = "cat /proc/cpuinfo"):
    output = run_cmd.run_cmd(cmd).split('\n')
    cpuinfo = {}
    for line in output:
        tmp = line.split(':')
        if len(tmp) != 2:
            continue
        key = tmp[0].strip()
        value = tmp[1].strip()
        if key in cpuinfo:
            cpuinfo[key].append(value)
        else:
            cpuinfo[key] = [value]
    return cpuinfo
Example 28
    def link(self,ace,objects):
        need_link = ace['need_link']
        linker_args=["g++"];
        linker_args.append("-rdynamic")
        linker_args.append("-o")
        linker_args.append(ace['target'])
        linker_args.extend(objects)
        if not os.path.exists(ace['target']):
            need_link = True
            target_time = 0
        else:
            target_time = os.path.getmtime(ace['target'])

        dependency_flags = set()
        if 'dependencies' in ace :
            for dependency in ace['expandedDependencies'] :
                dependency_ace = json.load(open(os.path.expanduser("~/.ace/%s/ace.json" %dependency['name'])))
                if ('header-only' in dependency_ace) and dependency_ace['header-only']:
                    continue;

                print("-- dependency_ace:{}".format(dependency['name']))
                pprint(dependency_ace)
                library_file = os.path.expanduser("~/.ace/%s/%s.a" %(dependency['name'],dependency_ace['target']));
                linker_args.extend(self.gpp['library-options'])
                linker_args.append(library_file);
                if 'dependency-flags' in dependency_ace:
                    dependency_flags.update(dependency_ace['dependency-flags'])
                dependency_time = os.path.getmtime(library_file)
                if dependency_time > target_time:
                    print( "-- Needs link: %s newer than %s\n\t(%s vs %s)" %(library_file,ace['target'],dependency_time,target_time) )
                    need_link = True;

        linker_args.extend(dependency_flags)
        linker_args.extend(self.gpp['linker-final-options'])
        if need_link:
            run_cmd(linker_args, echo=True)
Example 29
 def compile_module(self,ace,path):
     ace_dir=os.path.dirname(os.path.realpath(__file__))
     target_file = replace_extension(path,".o")
     if not self.module_needs_compile(ace,path) :
         # print( "-- Skipping: " + path )
         return target_file
     print( "-- Compiling: " + path )
     compiler_args=["g++"];
     compiler_args.extend(self.gpp['options'])
     compiler_args.append("-MD") # generate .d file
     compiler_args.append("-c") # compile, too!
     compiler_args.append("-g3") # include debug symbols.
     compiler_args.append("-O3") # optimize a lot.
     compiler_args.append("-rdynamic") # Pass the flag -export-dynamic to the ELF linker, on targets that support it. This instructs the linker to add all symbols, not only used ones, to the dynamic symbol table. This option is needed for some uses of dlopen or to allow obtaining backtraces from within a program.
     compiler_args.append("-Werror=return-type")
     compiler_args.append("-I{}".format(os.path.join(ace_dir,"include")))
     compiler_args.append("-o")
     compiler_args.append(target_file)
     for include_path in ace['include_dirs']:
         compiler_args.append("-I%s" %include_path);
     compiler_args.append(path)
     run_cmd(compiler_args)
     ace['need_link'] = True
     return target_file
Example 30
    def link(self,ace,objects):
        need_link = ace['need_link']
        linker_args=["g++"];
        linker_args.append("-rdynamic")
        linker_args.append("-o")
        linker_args.append(ace['target'])
        linker_args.extend(objects)
        if not os.path.exists(ace['target']):
            need_link = True
            target_time = 0
        else:
            target_time = os.path.getmtime(ace['target'])

        dependency_flags = set()
        if 'dependencies' in ace :
            for dependency in ace['expandedDependencies'] :
                dependency_ace = json.load(open(os.path.expanduser("~/.ace/%s/ace.json" %dependency['name'])))
                if ('header-only' in dependency_ace) and dependency_ace['header-only']:
                    continue;

                print("-- dependency_ace:{}".format(dependency['name']))
                pprint(dependency_ace)
                library_file = os.path.expanduser("~/.ace/%s/%s.a" %(dependency['name'],dependency_ace['target']));
                linker_args.extend(self.gpp['library-options'])
                linker_args.append(library_file);
                if 'dependency-flags' in dependency_ace:
                    dependency_flags.update(dependency_ace['dependency-flags'])
                dependency_time = os.path.getmtime(library_file)
                if dependency_time > target_time:
                    print( "-- Needs link: %s newer than %s\n\t(%s vs %s)" %(library_file,ace['target'],dependency_time,target_time) )
                    need_link = True;

        linker_args.extend(dependency_flags)
        linker_args.extend(self.gpp['linker-final-options'])
        if need_link:
            run_cmd(linker_args, echo=True)
Example 31
    def get_tf_output(cls, name):

        res = run_cmd('terraform', 'output', '-json', name)

        cls.logger.debug(json.dumps(res))

        if res['exit_code'] != 0:
            cls.logger.error(
                "failed to get {} from Terraform outputs".format(name))
            return None

        try:
            return json.loads(res['stdout'])
        except json.JSONDecodeError:
            return res['stdout'].strip().strip('"')
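
This example passes the command as separate string arguments and reads the result as a dict with 'exit_code' and 'stdout' keys. A sketch under those assumptions:

import subprocess

def run_cmd(*args):
    # Accept the command as separate arguments; return a dict holding the
    # exit code and captured stdout, matching how get_tf_output reads it.
    proc = subprocess.Popen(list(args), stdout=subprocess.PIPE,
                            universal_newlines=True)
    out, _ = proc.communicate()
    return {"exit_code": proc.returncode, "stdout": out}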
Example 32
def ship2device():
    from mysql_handler import get_except_ip
    global ip_except
    ip_except = get_except_ip()
    print ip_except

    for item in range(211, 212):
        ip = '192.168.1.' + str(item)
        global ip_except
        if ip in ip_except:
            print 'ip is except'
        else:
            global ip_list
            ip_list.append(ip)
            print ip
            print pack_name
            from run_cmd import update
            update(ip, pack_name)

    time.sleep(120)

    global pack_url
    global pack_url1
    text = 'HI ALL, \n泊车版本已经打包完成, 你可以在如下地址获取到最新的泊车版本\n' + pack_url + '\n' + pack_url1
    text = text + "\n\n更新内容可在http://192.168.1.164:8080/job/PARK/lastBuild/changes 获得.\n\n"
    text = text + "\n\n目前如下设备已经升级到最新版本([X]代表已升级, [ ]代表未升级):\n\n"

    from run_cmd import run_cmd
    for item in range(211, 212):
        ip = '192.168.1.' + str(item)
        version_text = run_cmd(ip, 'cat /var/version/version')
        v = version_text.split('=')
        version_text = v[1]
        global version
        if ip in ip_except:
            text = text + '[ ] ' + ip + '\n'
        else:
            text = text + '[X] ' + ip + '      ' + version_text + '\n'

    import send_mail
    send_mail.send_mail(sub="[PARK BUILD FINISHED] " + pack_name, content=text)
Example 33
def ship2device():
    from mysql_handler import get_except_ip
    global ip_except
    ip_except = get_except_ip()
    print ip_except

    for item in range(211, 212):
        ip = '192.168.1.' + str(item)
        global ip_except
        if ip in ip_except:
            print 'ip is except'
        else:
            global ip_list
            ip_list.append(ip)
            print ip
            print pack_name
            from run_cmd import update
            update(ip, pack_name)

    time.sleep(120)

    global pack_url
    global pack_url1
    text = 'HI ALL, \n泊车版本已经打包完成, 你可以在如下地址获取到最新的泊车版本\n' + pack_url + '\n' + pack_url1
    text = text + "\n\n更新内容可在http://192.168.1.164:8080/job/PARK/lastBuild/changes 获得.\n\n"
    text = text + "\n\n目前如下设备已经升级到最新版本([X]代表已升级, [ ]代表未升级):\n\n"

    from run_cmd import run_cmd
    for item in range(211, 212):
        ip = '192.168.1.' + str(item)
        version_text = run_cmd(ip, 'cat /var/version/version')
        v = version_text.split('=')
        version_text = v[1]
        global version
        if ip in ip_except:
            text = text + '[ ] ' + ip + '\n'
        else:
            text = text + '[X] ' + ip + '      ' + version_text + '\n'

    import send_mail
    send_mail.send_mail(sub="[PARK BUILD FINISHED] " + pack_name, content=text)
Example 34
def init_generation(exp_name, generation, population):
    # create the directory structure for this experiment
    for i in range(0, population):
        the_dir = "mkdir -p %s/%04d/%04d" % (exp_name, generation, i)
        run_cmd(the_dir)

    # copy the roms binary and input text files to each run directory
    for i in range(0, population):
        cp_cmd = "cp -r %s/* %s/%04d/%04d/." % (config.roms_bin_data, exp_name,
                                                generation, i)
        run_cmd(cp_cmd)

    # softlink the static data into each directory
    for i in range(0, population):
        ln_cmd = "ln -sf %s/* %s/%04d/%04d/" % (config.roms_static_data,
                                                exp_name, generation, i)
        run_cmd(ln_cmd)
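
The init_population, init_generation, rank_population, and evolve functions in this corpus look like pieces of a genetic-algorithm driver around a ROMS ocean model. A hedged sketch of how they might be strung together; the experiment name, sizes, and the exact ordering are assumptions, and the real driver presumably runs the model itself between setup and ranking.

# Hypothetical driver loop; the actual orchestration is not shown in these examples.
exp_name = "exp01"      # hypothetical experiment name
population = 16         # hypothetical population size
generations = 10

for generation in range(generations):
    init_generation(exp_name, generation, population)       # create/copy run directories
    if generation == 0:
        init_population(exp_name, generation, population)   # random initial drag values
    # ... run the ROMS model in each member directory here ...
    rank_population(exp_name, generation, population)       # compute and sort RMSE
    evolve(exp_name, generation, population)                 # produce the next generation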
Example 35
    def genCSVs(self):
        sc = self.sc
        path = self.path
        pdata = sc.addFile(path + '/' + self.accbd_file)
        data = SparkFiles.get(self.accbd_file)
        table_names = subprocess.Popen(["mdb-tables", "-1", data],
                                       stdout=subprocess.PIPE).communicate()[0]
        #print table_names
        tables = table_names.splitlines()
        print tables

        for table in tables:
            if table in self.usedFiles:
                file = open(table + '.csv', 'w')
                contents = subprocess.Popen(
                    ["mdb-export", data, table],
                    stdout=subprocess.PIPE).communicate()[0]
                file.write(contents)
                file.close()

                (ret, out, err) = run_cmd(
                    ['hdfs', 'dfs', '-put', '-f', table + '.csv', path])
                remove(getcwd() + "/" + table + ".csv")
Example 36
 def build_make(self):
     print( "-- Building make project" )
     make_args=["make"]
     run_cmd(make_args)
Example 37
def download_tiles(dem_url, aoi_geom, output_vrt, margin=DEFAULT_DEM_MARGIN_DEG):
    """Fetches DEM tiles from S3.

    If the specified URL is a single object, it is assumed to be a
    text file containing the prefix of the DEM in the current
    bucket. This can be used like a symbolic link to point at the most
    recent version in the bucket. For example, the GBDX account
    contains a factory DEM at s3://dgdem/current.txt. Inside of
    current.txt is the actual prefix, "DEM_2016_08_12". So tiles are
    stored under s3://dgdem/DEM_2016_08_12.

    If the specified URL is not a single object (i.e. it is a prefix),
    DEM tiles are assumed to be stored within it. The tiles are
    assumed to be in the EPSG:4326 DG tiling scheme.

    Args:
        dem_url: S3 URL of DEM tiles.
        aoi_geom: Shapely geometry defining the AOI to cover.
        output_vrt: Filename of output VRT containing the DEM tiles
            that cover the AOI.
        margin: Margin in degrees to buffer around AOI.

    Returns True if the tiles were downloaded successfully, False if not.

    """

    # Check whether the DEM URL is a prefix
    s3 = boto3.resource("s3")
    (bucket_name, key) = parse_s3_url(dem_url)
    obj = s3.Object(bucket_name, key)
    try:
        obj.content_length
        is_prefix = False
        logger.info("%s is a pointer, downloading contents" % dem_url)
    except ClientError as exc:
        if "404" in str(exc):
            is_prefix = True
            logger.info("%s is a prefix" % dem_url)
        else:
            logger.error(str(exc))
            return False
    bucket = s3.Bucket(bucket_name)

    # If the URL is not a prefix, open it to find the actual prefix
    if not is_prefix:
        # Download file with prefix in it
        pointer = bucket.Object(key)
        prefix = pointer.get()["Body"].read().strip()
    else:
        # URL is already a prefix
        prefix = key

    # Get the tile keys
    logger.info("Searching for tiles under s3://%s/%s" % (bucket_name, prefix))
    dem_tiles = {}
    for obj in bucket.objects.filter(Prefix=prefix):
        mobj = re.search("([0-3]+)[^/]+$", obj.key)
        if mobj is not None:
            dem_tiles[mobj.group(1)] = obj.key
    if not dem_tiles:
        logger.error("No DEM tiles found in bucket %s under prefix %s" % \
                     (bucket_name, prefix))
        return False

    # Grab a key and determine the zoom level
    zoom_level = len(dem_tiles.keys()[0])
    logger.info("Found %d tiles, assuming zoom level %d" % (len(dem_tiles), zoom_level))

    # Get the quadkeys that cover the AOI
    scheme = tiletanic.tileschemes.DGTiling()
    tile_gen = tiletanic.tilecover.cover_geometry(scheme, aoi_geom, zoom_level)
    qks = [scheme.quadkey(tile) for tile in tile_gen]
    missing_qks = [qk for qk in qks if qk not in dem_tiles]
    if missing_qks:
        logger.error("%d quadkeys missing from DEM: %s" % \
                     (len(missing_qks), ", ".join(missing_qks)))
        return False
    logger.info("Found %d quadkeys that cover the AOI: %s" % \
                (len(qks), ", ".join(qks)))

    # Download DEM tiles from S3
    out_dir = os.path.dirname(output_vrt)
    dem_files = []
    for qk in qks:
        key = dem_tiles[qk]
        dem_filename = os.path.join(out_dir, key)
        dem_filename_dir = os.path.dirname(dem_filename)
        if not os.path.isdir(dem_filename_dir):
            os.makedirs(dem_filename_dir)
        dem_files.append(dem_filename)
        logger.info("Downloading quadkey %s tile s3://%s/%s to %s" % \
                    (qk, bucket_name, key, dem_filename))
        bucket.download_file(key, dem_filename)

    # Generate VRT containing DEM tiles
    logger.info("Creating VRT " + output_vrt)
    run_cmd(["gdalbuildvrt",
             "-vrtnodata",
             "None",
             output_vrt] + dem_files,
            fail_msg="Failed to create DEM VRT")
    return True
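
A minimal usage sketch, assuming AWS credentials are configured and using the s3://dgdem/current.txt pointer mentioned in the docstring; the AOI coordinates and output path are hypothetical.

from shapely.geometry import box

# Hypothetical AOI in lon/lat (EPSG:4326) and an arbitrary output VRT path.
aoi = box(-105.10, 39.90, -105.00, 40.00)
ok = download_tiles("s3://dgdem/current.txt", aoi, "/tmp/dem/dem.vrt")
if not ok:
    raise RuntimeError("DEM tile download failed")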
Example 38
def local_dem_chip(base_name,
                   dem_dir,
                   bbox,
                   rpc_dem,
                   apply_geoid,
                   gdal_cachemax,
                   warp_memsize,
                   warp_threads,
                   margin=DEFAULT_DEM_MARGIN_DEG):
    """Subsets a local DEM for a specific area.

    Args:
        base_name: Base name of generated files.
        dem_dir: Path to destination directory for DEM chip.
        bbox: Shapely geometry containing area to extract.
        rpc_dem: Path to local DEM.
        apply_geoid: True to add geoid height to DEM, false to
            skip. Necessary for DEMs that are measured from
            geoid. (Most are.)
        gdal_cachemax: Cache size to use for GDAL utilities.
        warp_memsize: Extra cache size for warping.
        warp_threads: Number of threads to use for warping.

    Returns the path to the DEM chip.

    """

    # Subset DEM for this input including some margin. NOTE: The DEM
    # is assumed to be in a projection where pixels are measured in
    # degrees.
    bounds = bbox.bounds
    min_lat = bounds[1] - margin
    min_lon = bounds[0] - margin
    max_lat = bounds[3] + margin
    max_lon = bounds[2] + margin
    dem_chip = os.path.join(dem_dir, base_name + "_DEM.tif")
    logger.info("Subsetting DEM, (lat, lon) = (%.10f, %.10f) - (%.10f, %.10f)" % \
                (min_lat, min_lon, max_lat, max_lon))
    run_cmd(["gdal_translate",
             "--config",
             "GDAL_CACHEMAX",
             str(gdal_cachemax),
             "-projwin",
             str(min_lon),
             str(max_lat),
             str(max_lon),
             str(min_lat),
             rpc_dem,
             dem_chip],
            fail_msg="Failed to subset DEM %s" % rpc_dem,
            cwd=dem_dir)

    # Get the DEM's pixel resolution
    dem_pixel_size = None
    try:
        ds = gdal.Open(dem_chip)
        dem_pixel_size = ds.GetGeoTransform()[1]
    finally:
        ds = None
    if dem_pixel_size is None:
        raise InputError("Failed to get DEM chip %s pixel size" % dem_chip)
    logger.info("DEM pixel size is %.10f" % dem_pixel_size)

    # Check whether the DEM needs to be adjusted to height above ellipsoid
    if apply_geoid:
        # Subset geoid to match the DEM chip
        geoid_chip = os.path.join(dem_dir, base_name + "_GEOID.tif")
        logger.info("Subsetting geoid, (lat, lon) = (%.10f, %.10f) - (%.10f, %.10f)" % \
                    (min_lat, min_lon, max_lat, max_lon))
        run_cmd(["gdalwarp",
                 "--config",
                 "GDAL_CACHEMAX",
                 str(gdal_cachemax),
                 "-wm",
                 str(warp_memsize),
                 "-t_srs",
                 "EPSG:4326",
                 "-te",
                 str(min_lon),
                 str(min_lat),
                 str(max_lon),
                 str(max_lat),
                 "-tr",
                 str(dem_pixel_size),
                 str(dem_pixel_size),
                 "-r",
                 "bilinear",
                 GEOID_PATH,
                 geoid_chip],
                fail_msg="Failed to subset geoid %s" % GEOID_PATH,
                cwd=dem_dir)

        # Add the geoid to the DEM chip
        dem_plus_geoid_chip = os.path.join(dem_dir, base_name + "_DEM_PLUS_GEOID.tif")
        logger.info("Adding geoid to DEM")
        run_cmd(["gdal_calc.py",
                 "-A",
                 dem_chip,
                 "-B",
                 geoid_chip,
                 "--calc",
                 "A+B",
                 "--outfile",
                 dem_plus_geoid_chip],
                fail_msg="Failed to add geoid %s to DEM %s" % (geoid_chip, dem_chip),
                cwd=dem_dir)
        dem_chip = dem_plus_geoid_chip

    return dem_chip
Example 39
def compile_matlab_script(original_script_name, list_of_lines_to_add, mulitcore_matlab=False, queue_run=False, debug=False, custom_folders_to_compile=[]):
    ############
    # create temp matlab script file
    temp_dir = tempfile.mkdtemp( prefix=os.path.abspath("./")+"/tmp_")
    ###################
    # Import custom path definitions (pathdef.m) if available
    try: shutil.copy("./pathdef.m", temp_dir+"/pathdef.m")
    except: pass
    original_script_name = os.path.abspath(original_script_name)
    with restore_curr_dir():
        os.chdir(temp_dir)
        #print os.getcwd()
        add_path_track, known_functions = find_all_scripts_paths(original_script_name, [x.strip("addpath('").strip("')") for x in list_of_lines_to_add], set([original_script_name]))
        #add_path_track, known_functions = find_all_scripts_paths(original_script_name, [], set([original_script_name]))
        #print "\n\n",known_functions
        #sys.exit(1)
        for each_function in known_functions: 
            copyedit_script_for_compiling(each_function)
        fh_temp = tempfile.NamedTemporaryFile("w", prefix=os.path.abspath("./")+"/tmp_", suffix=".m", delete=False)
        #list_of_lines_to_add.append("cd('%s')" % os.path.split(original_script_name)[0])
        list_of_lines_to_add.append("addpath('/net/isi-backup/restricted/face/matlab_mydepfun/')")
        list_of_lines_to_add.append("file_list = mydepfun('%s', true)" % os.path.split(original_script_name)[1])
        #list_of_lines_to_add.append("cd('%s')" % temp_dir)        
        ENUMERATE_TOOLBOXES = [
                             "[temporary_files_variable, toolbox_folders] = dependencies.toolboxDependencyAnalysis({%s})" % ", ".join(["'" + os.path.split(x)[-1][:-2] + "'" for x in known_functions if x[-2:]==".m"]),
                             "mytoolboxList = cell(1,numel(toolbox_folders)*2);",
                             "cmp = 0;\n",
                             "for i=1:numel(toolbox_folders)",
                             "cmp = cmp +1;",
                             "if strcmp('general', toolbox_folders{i})",
                             "if numel(toolbox_folders) == 1",
                             "mytoolboxList = {'-p','matlab'};",
                             "end",
                             "else",
                             "mytoolboxList{cmp} = '-p';",
                             "cmp = cmp +1;",
                             "mytoolboxList{cmp} = toolbox_folders{i};",
                             "end",
                             "end",
                             ]
        list_of_lines_to_add.append("\n".join(ENUMERATE_TOOLBOXES)+"\n")
        
        ##############################
        # Adding the possibility to compile entire folder structures into the code
        add_in_folders_for_compile = [
                                      "".join(["custom_folders_to_compile = {"]+ ["'"+x+"'," for x in custom_folders_to_compile] +["};"]),
                                      "compile_folders = cell(1,numel(custom_folders_to_compile)*2);",
                                      "cmp = 0;",
                                      "for i=1:numel(custom_folders_to_compile)",
                                      "cmp = cmp +1;,",
                                      "compile_folders{cmp} = '-a';",
                                      "cmp = cmp +1;",
                                      "compile_folders{cmp} = custom_folders_to_compile{i};",
                                      "end",                                      
                                      ]
        list_of_lines_to_add.append("\n".join(add_in_folders_for_compile)+"\n")
        
        ###################
        # Major change in mcc command -N removes all knowledge of paths. NO TOOLBOXES
        ###################
        #MCC_string = "mcc( '-m', '-N', '-v', '-R', '-nosplash', '-R', '-nodisplay', '-R', '-nojvm', "
        MCC_string = "mcc( '-m', '-N', mytoolboxList{1:numel(mytoolboxList)}, compile_folders{1:numel(compile_folders)},'-v', '-R', '-nosplash', '-R', '-nodisplay', '-R', '-nojvm', " 
        if not mulitcore_matlab: MCC_string = MCC_string + "'-R', '-singleCompThread', "
        MCC_string = MCC_string + "'%s', file_list{1:numel(file_list)-1})" % os.path.split(original_script_name)[1]
        list_of_lines_to_add.append(MCC_string)
        list_of_lines_to_add.append("exit")
        fh_temp.write(";\n".join(list_of_lines_to_add)+";\n")
        temp_name = fh_temp.name
        fh_temp.close()
        #if debug:
        #    print "\n\n\n\n\n", temp_name, "\n\n\n\n\n"
        #    sys.exit(1)
        ############
        # prepare run_cmd
        fgu217_queue = 'nice -19 qrsh -cwd -q fgu217.q -v BASH_ENV="~/.bashrc" -now n'
        matlab_run_cmd = '/net/isi-software/tools/matlab2011b/bin/matlab -nosplash -nodisplay' # -nojvm'
        single_core = '-singleCompThread'
        curr_script_to_run = '-r "%s; exit"' % (os.path.split(fh_temp.name)[1][:-2])
        curr_run_str_list = []
        if queue_run: curr_run_str_list.append(fgu217_queue) 
        curr_run_str_list.append(matlab_run_cmd)
        if not mulitcore_matlab: curr_run_str_list.append(single_core)
        curr_run_str_list.append(curr_script_to_run)
        
        curr_run_cmd = " ".join(curr_run_str_list)
        ############
        # run the script
        #print curr_run_cmd
        #sys.exit(1)
        run_cmd(curr_run_cmd, "running matlab scripts")
        #
        if debug:
            print "\n\n\n\n\n", "./"+os.path.split(original_script_name)[1][:-len(".m")] 
            print original_script_name[:-len(".m")]
            print "\n\n",
            print "./run_"+os.path.split(original_script_name)[1][:-len(".m")]+".sh"
            print os.path.split(original_script_name)[0]+"/run_"+os.path.split(original_script_name)[1][:-len(".m")]+".sh"
            print "\n\n\n\n\n"
            sys.exit(1)
        #sys.exit(1)
        os.rename("./"+os.path.split(original_script_name)[1][:-len(".m")], original_script_name[:-len(".m")])
        os.rename("./run_"+os.path.split(original_script_name)[1][:-len(".m")]+".sh", os.path.split(original_script_name)[0]+"/run_"+os.path.split(original_script_name)[1][:-len(".m")]+".sh")
        ############
        # cleanup
        os.unlink(temp_name)
        os.unlink("./mccExcludedFiles.log")
        os.unlink("./readme.txt")
        for each_function in known_functions:
            #print "./"+os.path.split(each_function)[1]
            try: os.unlink("./"+os.path.split(each_function)[1])
            except: pass
        try: os.unlink("./pathdef.m")
        except:pass
    os.rmdir(temp_dir)
Example 40
        else:
            if self.arg_type == 'none':
                self.usages.append(line)
            else:
                # append help to last action
                if self.action:
                    old_help = self.action['help'] if self.action[
                        'help'] else ''
                    self.action['help'] = old_help + line.strip()
        return True


if __name__ == '__main__':
    '''
    exp = r'^\W{2}([a-z0-9-_]+)(.*\W{2,}.*)?'
    pattern = re.compile(exp, re.I)   # re.I Ignore case
    m = pattern.match('Hello World Wide Web')
    '''
    #args = 'python D:/work/gitlab/DeepLoader/eval/run_verify.py -h'
    args = 'python /home/ysten/tzk/run_verify.py -h'
    parser = ArgParser()
    result = run_cmd('/home/ysten/tzk/gitlab/DIGITS',
                     args,
                     process_output=parser,
                     name='jobdir')
    parser.verbose()
    #print(parser.arg_dict)
    print(1111)
    print(parser.get_args('opt'))
    #print(result)
Example 41
def set_brightness(new_level, time):
    run_cmd("xbacklight -set %s -time %s" % (new_level, time))
Example 42
def monitor_luma():
  prev_brightness = None
  prev_window = None
  prev_mean = None
  faded = None

  cur_range = opts.MAX_WHITE - opts.MIN_WHITE
  suppressed_time = 0

  last_screenshot = int(time.time())


  while True:
    time.sleep(opts.SLEEP_TIME / 1000.0)

    window = get_window()

    now = time.time()
    if prev_window != window:
      if not faded:
        if window in PREV_LEVELS:
          pred = PREV_LEVELS[window]
          fade = opts.TRANSITION_MS / 2
          if opts.VERBOSE:
            curtime = int(time.time())
            print "PRIOR|TS:%s," % curtime, "RECALLED BRIGHTNESS:", "%s/%s" % (int(pred), MAX_LEVEL), "FOR", window[:15]

          set_brightness(pred, fade)
          faded = True

          continue

    if now - last_screenshot < opts.SCREENSHOT_TIME / 1000.0:
      continue

    last_screenshot = now
    faded = False



    if prev_window == window:
      suppressed_time += opts.SCREENSHOT_TIME / 1000

      if opts.LEARN_MODE and prev_brightness:
        models.add_observation(prev_brightness, prev_mean)

      if opts.RECALIBRATE_MS > 0 and suppressed_time < opts.RECALIBRATE_MS:
          continue

      print "RECALIBRATING BRIGHTNESS AFTER %S ms" % opts.RECALIBRATE_MS

    suppressed_time = 0

    window = get_window()
    prev_window = window

    brightness = run_cmd(opts.SCREENSHOT_CMD + " " + opts.BRIGHTNESS_CMD)


    try:
      cur_mean = float(brightness)
    except Exception, e:
      print "ERROR GETTING MEAN LUMA", e
      continue


    trimmed_mean = max(min(opts.MAX_WHITE, cur_mean), opts.MIN_WHITE) - opts.MIN_WHITE
    trimmed_mean = int(trimmed_mean / models.LUMA_BUCKET) * models.LUMA_BUCKET
    range_is = float(trimmed_mean) / float(cur_range)

    new_gamma = 1 - range_is
    hour = get_hour()
    new_level =  (opts.MAX_LEVEL - opts.MIN_LEVEL) * new_gamma + opts.MIN_LEVEL

    pred_level = models.get_mean_brightness(hour, trimmed_mean)
    if pred_level is not None:
      new_level = pred_level



    prev_mean = trimmed_mean

    new_level = max(round(new_level), 1)
    if prev_brightness != new_level:
      now = int(time.time())
      print "MODEL|TS:%s," % now, "LUMA:%05i," % trimmed_mean, "NEW GAMMA:%.02f," % new_gamma, "NEW BRIGHTNESS:", "%s/%s" % (int(new_level), opts.MAX_LEVEL)

      set_brightness(new_level, opts.TRANSITION_MS / 2)

      add_prev_level(window, new_level)
    prev_brightness = new_level
Example 43
def run_matlab_script_compiled(original_script_name, list_of_variables, list_of_non_symlink_var=[], mulitcore_matlab=False, queue_run=True, cluster_run=True, debug=False):
    
    TEMPORARY_FOLDER_FOLDER = "temp_run_folders/"
    try: os.mkdir(TEMPORARY_FOLDER_FOLDER)
    except: assert  os.path.exists(TEMPORARY_FOLDER_FOLDER)

    ############
    # create temp matlab script file
    temp_dir = tempfile.mkdtemp( prefix=os.path.abspath("./"+TEMPORARY_FOLDER_FOLDER)+"/tmp_")
    symlink_names = []
    for curr_var in list_of_variables:
        curr_var_name, curr_var_val = curr_var.split(" = ")
        curr_var_val = curr_var_val.strip("'")
        fh_out = open(temp_dir+"/"+curr_var_name+".txt", "w")
        ######################
        # Make symbolic links to all directories to work around Matlab's 64-character path limit
        # IS THIS ACTUALLY AN ISSUE?
        
        curr_dir, curr_file_name = os.path.split(curr_var_val)
        curr_symlink_name = temp_dir+"/"+curr_var_name
        symlink_names.append(curr_symlink_name)
        os.symlink(os.path.split(os.path.abspath(curr_var_val))[0], curr_symlink_name)
        #
        ######################
        #print os.path.split(curr_var_val)
        fh_out.write(curr_var_name+"/"+os.path.split(curr_var_val)[1])
        #fh_out.write(os.path.abspath(curr_var_val))
        fh_out.close()
    ############
    for curr_var in list_of_non_symlink_var:
        curr_var_name, curr_var_val = curr_var.split(" = ")
        curr_var_val = curr_var_val.strip("'")
        fh_out = open(temp_dir+"/"+curr_var_name+".txt", "w")
        fh_out.write(curr_var_val)
        fh_out.close()
    ############
    # prepare run_cmd
    
    shutil.copy(os.path.split(original_script_name)[0]+"/run_"+os.path.split(original_script_name)[1][:-len(".m")]+".sh", temp_dir+"/run_"+os.path.split(original_script_name)[1][:-len(".m")]+".sh")
    shutil.copy(os.path.split(original_script_name)[0]+"/"+os.path.split(original_script_name)[1][:-len(".m")], temp_dir+"/"+os.path.split(original_script_name)[1][:-len(".m")])
    
    fgu217_queue = 'nice -19 qrsh -cwd -q fgu217.q -v BASH_ENV="~/.bashrc" -now n'
    cluster_queue = 'nice -19 qrsh -cwd -q medium_jobs.q -v BASH_ENV="~/.bashrc" -now n'
    script_to_run_cmd = temp_dir+"/run_"+os.path.split(original_script_name)[1][:-len(".m")]+".sh /net/isi-software/tools/matlab2011b/"
    curr_run_str_list = []
    if queue_run:
        if cluster_run:
            curr_run_str_list.append(cluster_queue)
        else:
            curr_run_str_list.append(fgu217_queue) 
    curr_run_str_list.append(script_to_run_cmd)
    curr_run_cmd = " ".join(curr_run_str_list)
    ############
    # run the script
    if debug:
        print curr_run_cmd
        sys.exit(1)
    with restore_curr_dir():
        os.chdir(temp_dir)
        #sys.exit(1)
        if cluster_run: time.sleep(random.randint(1,7))
        run_cmd(curr_run_cmd, "running matlab scripts")
    ############
    # cleanup
    for x_files in glob.glob(temp_dir+"/*"):
        os.unlink(x_files)
    shutil.rmtree(temp_dir)
Example 44
def local_dem_chip(base_name,
                   dem_dir,
                   bbox,
                   rpc_dem,
                   apply_geoid,
                   gdal_cachemax,
                   warp_memsize,
                   warp_threads,
                   margin=DEFAULT_DEM_MARGIN_DEG):
    """Subsets a local DEM for a specific area.

    Args:
        base_name: Base name of generated files.
        dem_dir: Path to destination directory for DEM chip.
        bbox: Shapely geometry containing area to extract.
        rpc_dem: Path to local DEM.
        apply_geoid: True to add geoid height to DEM, false to
            skip. Necessary for DEMs that are measured from
            geoid. (Most are.)
        gdal_cachemax: Cache size to use for GDAL utilities.
        warp_memsize: Extra cache size for warping.
        warp_threads: Number of threads to use for warping.

    Returns the path to the DEM chip.

    """

    # Subset DEM for this input including some margin. NOTE: The DEM
    # is assumed to be in a projection where pixels are measured in
    # degrees.
    bounds = bbox.bounds
    min_lat = bounds[1] - margin
    min_lon = bounds[0] - margin
    max_lat = bounds[3] + margin
    max_lon = bounds[2] + margin
    dem_chip = os.path.join(dem_dir, base_name + "_DEM.tif")
    logger.info("Subsetting DEM, (lat, lon) = (%.10f, %.10f) - (%.10f, %.10f)" % \
                (min_lat, min_lon, max_lat, max_lon))
    run_cmd(["gdal_translate",
             "--config",
             "GDAL_CACHEMAX",
             str(gdal_cachemax),
             "-projwin",
             str(min_lon),
             str(max_lat),
             str(max_lon),
             str(min_lat),
             rpc_dem,
             dem_chip],
            fail_msg="Failed to subset DEM %s" % rpc_dem,
            cwd=dem_dir)

    # Get the DEM's pixel resolution
    dem_pixel_size = None
    try:
        ds = gdal.Open(dem_chip)
        dem_pixel_size = ds.GetGeoTransform()[1]
    finally:
        ds = None
    if dem_pixel_size is None:
        raise InputError("Failed to get DEM chip %s pixel size" % dem_chip)
    logger.info("DEM pixel size is %.10f" % dem_pixel_size)

    # Check whether the DEM needs to be adjusted to height above ellipsoid
    if apply_geoid:
        # Subset geoid to match the DEM chip
        geoid_chip = os.path.join(dem_dir, base_name + "_GEOID.tif")
        logger.info("Subsetting geoid, (lat, lon) = (%.10f, %.10f) - (%.10f, %.10f)" % \
                    (min_lat, min_lon, max_lat, max_lon))
        run_cmd(["gdalwarp",
                 "--config",
                 "GDAL_CACHEMAX",
                 str(gdal_cachemax),
                 "-wm",
                 str(warp_memsize),
                 "-t_srs",
                 "EPSG:4326",
                 "-te",
                 str(min_lon),
                 str(min_lat),
                 str(max_lon),
                 str(max_lat),
                 "-tr",
                 str(dem_pixel_size),
                 str(dem_pixel_size),
                 "-r",
                 "bilinear",
                 GEOID_PATH,
                 geoid_chip],
                fail_msg="Failed to subset geoid %s" % GEOID_PATH,
                cwd=dem_dir)

        # Add the geoid to the DEM chip
        dem_plus_geoid_chip = os.path.join(dem_dir, base_name + "_DEM_PLUS_GEOID.tif")
        logger.info("Adding geoid to DEM")
        run_cmd(["gdal_calc.py",
                 "-A",
                 dem_chip,
                 "-B",
                 geoid_chip,
                 "--calc",
                 "A+B",
                 "--outfile",
                 dem_plus_geoid_chip],
                fail_msg="Failed to add geoid %s to DEM %s" % (geoid_chip, dem_chip),
                cwd=dem_dir)
        dem_chip = dem_plus_geoid_chip

    return dem_chip
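A minimal usage sketch of local_dem_chip follows; the AOI, paths, and cache sizes are made up for illustration, and the call shells out to gdal_translate/gdalwarp, so it only runs where those utilities and the referenced DEM exist:

import shapely.geometry

# Hypothetical 0.1 x 0.1 degree area of interest and local DEM path.
aoi = shapely.geometry.box(-105.1, 39.9, -105.0, 40.0)
chip = local_dem_chip(base_name="example_scene",
                      dem_dir="/tmp/dem_chips",
                      bbox=aoi,
                      rpc_dem="/data/dem/national_dem.tif",
                      apply_geoid=True,
                      gdal_cachemax=1024,
                      warp_memsize=1024,
                      warp_threads=4)
print(chip)  # e.g. /tmp/dem_chips/example_scene_DEM_PLUS_GEOID.tif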
Example No. 45
0
def download_tiles(dem_url, aoi_geom, output_vrt, margin=DEFAULT_DEM_MARGIN_DEG):
    """Fetches DEM tiles from S3.

    If the specified URL is a single object, it is assumed to be a
    text file containing the prefix of the DEM in the current
    bucket. This can be used like a symbolic link to point at the most
    recent version in the bucket. For example, the GBDX account
    contains a factory DEM at s3://dgdem/current.txt. Inside of
    current.txt is the actual prefix, "DEM_2016_08_12". So tiles are
    stored under s3://dgdem/DEM_2016_08_12.

    If the specified URL is not a single object (i.e. it is a prefix),
    DEM tiles are assumed to be stored within it. The tiles are
    assumed to be in the EPSG:4326 DG tiling scheme.

    Args:
        dem_url: S3 URL of DEM tiles.
        aoi_geom: Shapely geometry defining the AOI to cover.
        output_vrt: Filename of output VRT containing the DEM tiles
            that cover the AOI.
        margin: Margin in degrees to buffer around AOI.

    Returns True if the tiles were downloaded successfully, False if not.

    """

    # Check whether the DEM URL is a prefix
    s3 = boto3.resource("s3")
    (bucket_name, key) = parse_s3_url(dem_url)
    obj = s3.Object(bucket_name, key)
    try:
        obj.content_length
        is_prefix = False
        logger.info("%s is a pointer, downloading contents" % dem_url)
    except ClientError as exc:
        if "404" in str(exc):
            is_prefix = True
            logger.info("%s is a prefix" % dem_url)
        else:
            logger.error(str(exc))
            return False
    bucket = s3.Bucket(bucket_name)

    # If the URL is not a prefix, open it to find the actual prefix
    if not is_prefix:
        # Download file with prefix in it
        pointer = bucket.Object(key)
        prefix = pointer.get()["Body"].read().strip()
    else:
        # URL is already a prefix
        prefix = key

    # Get the tile keys
    logger.info("Searching for tiles under s3://%s/%s" % (bucket_name, prefix))
    dem_tiles = {}
    for obj in bucket.objects.filter(Prefix=prefix):
        mobj = re.search("([0-3]+)[^/]+$", obj.key)
        if mobj is not None:
            dem_tiles[mobj.group(1)] = obj.key
    if not dem_tiles:
        logger.error("No DEM tiles found in bucket %s under prefix %s" % \
                     (bucket_name, prefix))
        return False

    # Grab a key and determine the zoom level
    zoom_level = len(dem_tiles.keys()[0])
    logger.info("Found %d tiles, assuming zoom level %d" % (len(dem_tiles), zoom_level))

    # Get the quadkeys that cover the AOI
    scheme = tiletanic.tileschemes.DGTiling()
    tile_gen = tiletanic.tilecover.cover_geometry(scheme, aoi_geom, zoom_level)
    qks = [scheme.quadkey(tile) for tile in tile_gen]
    missing_qks = [qk for qk in qks if qk not in dem_tiles]
    if missing_qks:
        logger.error("%d quadkeys missing from DEM: %s" % \
                     (len(missing_qks), ", ".join(missing_qks)))
        return False
    logger.info("Found %d quadkeys that cover the AOI: %s" % \
                (len(qks), ", ".join(qks)))

    # Download DEM tiles from S3
    out_dir = os.path.dirname(output_vrt)
    dem_files = []
    for qk in qks:
        key = dem_tiles[qk]
        dem_filename = os.path.join(out_dir, key)
        dem_filename_dir = os.path.dirname(dem_filename)
        if not os.path.isdir(dem_filename_dir):
            os.makedirs(dem_filename_dir)
        dem_files.append(dem_filename)
        logger.info("Downloading quadkey %s tile s3://%s/%s to %s" % \
                    (qk, bucket_name, key, dem_filename))
        bucket.download_file(key, dem_filename)

    # Generate VRT containing DEM tiles
    logger.info("Creating VRT " + output_vrt)
    run_cmd(["gdalbuildvrt",
             "-vrtnodata",
             "None",
             output_vrt] + dem_files,
            fail_msg="Failed to create DEM VRT")
    return True
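parse_s3_url() is called above but not shown on this page. A minimal sketch, assuming it only splits an s3://bucket/key URL into its bucket and key parts, might look like:

def parse_s3_url(url):
    # Sketch only: split "s3://bucket/some/key" into ("bucket", "some/key").
    if not url.startswith("s3://"):
        raise ValueError("Not an S3 URL: %s" % url)
    bucket, _, key = url[len("s3://"):].partition("/")
    return (bucket, key)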
Example No. 46
0
                    nrow=nrow,
                    xll=gt_act_et[0],
                    yll=gt_act_et[3] - nrow * cellsize,
                    cellsize=cellsize,
                    nodata=-9999.,
                    data=( act_et_data + interception_data ),
                    filename=total_act_et_filename )
        mytotal_act_etfiles.append( total_act_et_filename )


filelist = open( 'huc_list_recharge_vrt.txt', 'w')
for filename in myrechargefiles:
    filelist.write( filename + '\n' )
filelist.close()

command_args = ['-input_file_list','huc_list_recharge_vrt.txt','concatenated_swb_recharge.vrt']
rc.run_cmd( command_text='gdalbuildvrt', command_arguments=command_args )

filelist = open( 'huc_list_gross_precip_vrt.txt', 'w')
for filename in myprecipfiles:
    filelist.write( filename + '\n' )
filelist.close()

command_args = ['-input_file_list','huc_list_gross_precip_vrt.txt','concatenated_swb_gross_precip.vrt']
rc.run_cmd( command_text='gdalbuildvrt', command_arguments=command_args )


filelist = open( 'huc_list_interception_vrt.txt', 'w')
for filename in myinterceptionfiles:
    filelist.write( filename + '\n' )
filelist.close()

command_args = ['-input_file_list','huc_list_interception_vrt.txt','concatenated_swb_interception.vrt']
rc.run_cmd( command_text='gdalbuildvrt', command_arguments=command_args )
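The rc.run_cmd() wrapper used in this script takes the command name and its arguments separately. The real module is not shown here; a minimal sketch under that assumption, built on subprocess, could be:

import subprocess

def run_cmd(command_text, command_arguments):
    # Sketch only: run an external command and raise if it exits nonzero.
    args = [command_text] + list(command_arguments)
    retcode = subprocess.call(args)
    if retcode != 0:
        raise RuntimeError("%s failed with exit code %d" % (" ".join(args), retcode))
    return retcode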
Example No. 47
0
 def delete_hdfs(self):
     """Borra archivos de la corrida de HDFS"""
     path = self.path
     run_cmd(["hadoop", "fs", "-rm", "-r", "-f", path])
Example No. 48
0
def gdal_ortho(input_dir, output_dir, target_srs, pixel_size, aoi, bands,
               rpc_dem, use_hae, apply_geoid, resampling_method,
               error_threshold, create_vrts, gdal_cachemax, warp_memsize,
               warp_threads, num_parallel, tmpdir):
    """Wrapper for orthorectification using GDAL utilities.

    This script orthorectifies a DigitalGlobe 1B product using the
    provided RPCs. The input directory should contain a single 1B
    product.

    This script assumes that the necessary utilities are accessible
    from the execution environment.

    """

    # Fix paths
    input_dir = os.path.realpath(input_dir)
    output_dir = os.path.realpath(output_dir)
    if rpc_dem is not None:
        rpc_dem = os.path.realpath(rpc_dem)
    if tmpdir is not None:
        tmpdir = os.path.realpath(tmpdir)

    # Parse band list
    if bands is not None:
        bands_to_process = set(re.split(r"\s+|\s*,\s*", bands))
    else:
        bands_to_process = None

    # Override DEM input if using height above ellipsoid
    if use_hae:
        rpc_dem = None

    # Walk the input directory to find all the necessary files. Store
    # by part number then by band.
    part_shps = defaultdict(dict)
    part_dirs = defaultdict(dict)
    part_info = defaultdict(dict)
    for (path, dirs, files) in os.walk(input_dir, followlinks=True):
        # Look for GIS_FILES directory
        if os.path.basename(path).lower() == "gis_files":
            for f in files:
                # Find the per-part PIXEL_SHAPE files
                m_obj = re.search(r"\w+-(\w)\w+-\w+_p(\d+)_pixel_shape.shp$",
                                  f,
                                  flags=re.IGNORECASE)
                if m_obj is not None:
                    band_char = m_obj.group(1)
                    part_num = int(m_obj.group(2))
                    if band_char not in FILENAME_BAND_ALIASES:
                        logger.warn("PIXEL_SHAPE filename %s contains unknown band character %s" % \
                                    (f, band_char))
                    else:
                        band_alias = FILENAME_BAND_ALIASES[band_char]
                        part_shps[part_num][band_alias] = os.path.join(path, f)

        # Look for part directories
        m_obj = re.search(r".+_p(\d+)_\w+$", path, flags=re.IGNORECASE)
        if m_obj is not None:
            part_num = int(m_obj.group(1))

            # Look for IMD files
            imd_info = None
            for f in files:
                if os.path.splitext(f)[1].lower() == ".imd":
                    imd_info = __parse_imd(os.path.join(path, f))
            if imd_info is None:
                logger.warn("Part directory %s has no IMD file" % path)
            else:
                # Check band ID
                if imd_info.band_id not in IMD_BAND_ALIASES:
                    logger.warn("IMD file %s contains unknown bandId %s" % \
                                (imd_info.imd_file, imd_info.band_id))
                else:
                    band_alias = IMD_BAND_ALIASES[imd_info.band_id]
                    if bands_to_process is not None and \
                       band_alias not in bands_to_process:
                        logger.info("Skipping part directory %s (%s)" % \
                                    (path, band_alias))
                    else:
                        # Save this part directory
                        part_dirs[part_num][band_alias] = path
                        part_info[part_num][band_alias] = imd_info
                        logger.info("Found part directory %s (%s)" % \
                                    (path, band_alias))
    logger.info("Found %d part directories" % len(part_dirs))

    # Load all the shapefiles into one big geometry
    geoms = []
    for band_shps in part_shps.itervalues():
        for shp_filename in band_shps.itervalues():
            with fiona.open(shp_filename, "r") as shp:
                geoms += [
                    shapely.geometry.shape(rec["geometry"]) for rec in shp
                ]
    full_geom = shapely.ops.unary_union(geoms)

    # Handle special "UTM" target SRS
    utm_epsg_code = None
    if target_srs.lower() == "utm":
        utm_epsg_code = __get_utm_epsg_code(full_geom.centroid.y,
                                            full_geom.centroid.x)
        target_srs = "EPSG:%d" % utm_epsg_code
        logger.info("UTM target SRS is %s" % target_srs)

    # Create a PROJ.4 string of the target SRS for easy fiona calls
    try:
        srs = osr.SpatialReference()
        srs.SetFromUserInput(str(target_srs))
        target_srs_proj4 = srs.ExportToProj4()
    finally:
        srs = None

    # Transform the full geometry into the target SRS. Its bounding
    # box defines the origin of the grid that each TIF should be
    # orthorectified into.
    src_crs = fiona.crs.from_epsg(4326)
    dst_crs = fiona.crs.from_string(target_srs_proj4)
    full_geom_srs = shapely.geometry.mapping(full_geom)
    full_geom_srs = fiona.transform.transform_geom(src_crs, dst_crs,
                                                   full_geom_srs)
    full_geom_srs = shapely.geometry.shape(full_geom_srs)
    grid_origin = full_geom_srs.bounds[0:2]
    logger.info("Ortho grid origin: %.10f, %.10f" % \
                (grid_origin[0], grid_origin[1]))

    # Check whether pixel_size needs to be calculated
    if pixel_size is None:
        # Loop over all the image info and find the best (smallest)
        # GSD. This will be used to define the pixel size in the
        # target SRS.
        min_gsd = min([
            info.avg_gsd for band_info in part_info.itervalues()
            for info in band_info.itervalues()
        ])
        logger.info("Best input GSD is %.10f" % min_gsd)

        # Get the UTM zone to use
        if utm_epsg_code is None:
            utm_epsg_code = __get_utm_epsg_code(full_geom.centroid.y,
                                                full_geom.centroid.x)

        # Transform the full geometry's centroid into UTM
        src_crs = fiona.crs.from_epsg(4326)
        dst_crs = fiona.crs.from_epsg(utm_epsg_code)
        pt = shapely.geometry.mapping(full_geom.centroid)
        pt = fiona.transform.transform_geom(src_crs, dst_crs, pt)
        pt = shapely.geometry.shape(pt)

        # Add the best GSD to define a square in UTM space
        pix = shapely.geometry.box(pt.x, pt.y, pt.x + min_gsd, pt.y + min_gsd)

        # Transform the pixel box into the target SRS
        src_crs = dst_crs
        dst_crs = fiona.crs.from_string(target_srs_proj4)
        pix = shapely.geometry.mapping(pix)
        pix = fiona.transform.transform_geom(src_crs, dst_crs, pix)
        pix = shapely.geometry.shape(pix)

        # Use the smaller dimension from the bounding box of the
        # transformed pixel as the pixel size. The larger dimension
        # will just end up being slightly oversampled, so no data is
        # lost.
        bounds = pix.bounds
        pixel_size = min(abs(bounds[2] - bounds[0]),
                         abs(bounds[3] - bounds[1]))
        logger.info("Calculated pixel size in target SRS is %.10f" %
                    pixel_size)

    # Find average height above ellipsoid over all parts
    hae_vals = [
        info.avg_hae for band_info in part_info.itervalues()
        for info in band_info.itervalues()
    ]
    if hae_vals:
        avg_hae = sum(hae_vals) / len(hae_vals)
    else:
        avg_hae = 0.0
    logger.info("Average height above ellipsoid is %.10f" % avg_hae)

    # Create a pool of worker threads. Each worker thread will call
    # out to GDAL utilities to do actual work.
    worker_pool = ThreadPoolExecutorWithCallback(max_workers=num_parallel)

    # Create a working directory for temporary data
    temp_dir = tempfile.mkdtemp(dir=tmpdir)
    try:
        # Check whether to download DEM data from S3
        if not use_hae and rpc_dem is None:
            # The use_hae flag is not set and no DEM path was
            # specified on the command line. Try to copy from S3.
            dem_vrt = os.path.join(temp_dir,
                                   "dgdem_" + str(uuid.uuid4()) + ".vrt")
            if dem.fetch_dgdem_tiles(full_geom.buffer(DEM_FETCH_MARGIN_DEG),
                                     dem_vrt):
                logger.info("Downloaded DEM tiles, using VRT %s" % dem_vrt)
                rpc_dem = dem_vrt
            else:
                logger.warn(
                    "Failed to download DEM tiles from S3, reverting to "
                    "average height above ellipsoid")
                use_hae = True

        # Loop over parts and submit jobs to worker pool
        for part_num in part_dirs.iterkeys():
            # Extract bands for this part
            band_shps = part_shps[part_num]
            band_dirs = part_dirs[part_num]
            band_info = part_info[part_num]

            # Submit job
            worker_pool.submit(worker_thread, part_num, band_dirs, band_info,
                               band_shps, target_srs, target_srs_proj4,
                               grid_origin, pixel_size, aoi, rpc_dem, avg_hae,
                               apply_geoid, resampling_method, error_threshold,
                               gdal_cachemax, warp_memsize, warp_threads,
                               input_dir, output_dir, temp_dir)

        # Wait for all workers to finish
        canceled = False
        try:
            while worker_pool.is_running():
                time.sleep(1)
        except KeyboardInterrupt:
            canceled = True
            logger.warn("Received interrupt, canceling pending jobs...")
            num_canceled = worker_pool.cancel_all()
            logger.warn("Canceled %d pending jobs" % num_canceled)
            time.sleep(1)

        # Wait for workers to finish
        worker_pool.shutdown()

        # Create VRTs if requested
        if create_vrts and not canceled:
            # Walk output directory looking for TIFs
            tifs_by_band = defaultdict(list)
            for (path, dirs, files) in os.walk(output_dir, followlinks=True):
                for f in files:
                    m_obj = re.search(r"\w+-(\w)\w+-\w+_p\d+.tif$",
                                      f,
                                      flags=re.IGNORECASE)
                    if m_obj is not None:
                        band_char = m_obj.group(1)
                        if band_char not in FILENAME_BAND_ALIASES:
                            logger.warn("Output TIF filename %s contains unknown band character %s" % \
                                        (f, band_char))
                        else:
                            band_alias = FILENAME_BAND_ALIASES[band_char]
                            tifs_by_band[band_alias].append(
                                os.path.join(path, f))

            # Create a VRT for each band
            for (band_alias, tif_list) in tifs_by_band.iteritems():
                # Use the first TIF's name
                m_obj = re.search(r"(.+)_p\d+.tif$",
                                  os.path.basename(tif_list[0]),
                                  flags=re.IGNORECASE)
                if m_obj is not None:
                    vrt_name = m_obj.group(1) + ".vrt"
                else:
                    vrt_name = "ortho_%s.vrt" % band_alias

                # Get relative paths to files from the output directory
                relpaths = [os.path.relpath(f, output_dir) for f in tif_list]

                # Create VRT (paths are relative to output_dir)
                logger.info("Creating band %s VRT %s" % (band_alias, vrt_name))
                run_cmd(["gdalbuildvrt", "-srcnodata", "0", vrt_name] +
                        relpaths,
                        fail_msg="Failed to create band %s VRT %s" %
                        (band_alias, vrt_name),
                        cwd=output_dir)

    finally:
        # Delete the temporary directory and its contents
        shutil.rmtree(temp_dir)
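__get_utm_epsg_code() is used twice above to pick a UTM zone from the scene centroid but is not included in this listing. A minimal sketch, assuming the standard WGS84 UTM EPSG numbering (326xx north of the equator, 327xx south), is:

import math

def __get_utm_epsg_code(lat, lon):
    # Sketch only: EPSG code of the WGS84 UTM zone containing (lat, lon).
    zone = int(math.floor((lon + 180.0) / 6.0)) + 1
    zone = min(max(zone, 1), 60)   # clamp longitude edge cases
    if lat >= 0:
        return 32600 + zone
    return 32700 + zone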
Example No. 49
0
def set_brightness(new_level, time):
    run_cmd("xbacklight -set %s -time %s" % (new_level, time))
Example No. 50
0
def worker_thread(part_num, band_dirs, band_info, band_shps, target_srs,
                  target_srs_proj4, grid_origin, pixel_size, aoi, rpc_dem,
                  avg_hae, apply_geoid, resampling_method, error_threshold,
                  gdal_cachemax, warp_memsize, warp_threads, input_dir,
                  output_dir, temp_dir):
    """Orthorectifies a 1B part using GDAL utilities.

    Args:
        part_num: Part number.
        band_dirs: Dictionary containing directory paths for each band
            in the part.
        band_info: Tuples generated from __parse_imd for each band in
            the part.
        band_shps: Paths to shapefiles for each band in the part.
        target_srs: Spatial reference system to warp into.
        target_srs_proj4: PROJ.4 string of the target SRS.
        grid_origin: Tuple containing origin x and y of the ortho grid.
        pixel_size: Requested pixel size in target SRS units.
        aoi: Tuple containing min x, min y, max x, and max y bounds of
            AOI to orthorectify in target SRS units.
        rpc_dem: Path to DEM to use for warping.
        avg_hae: Average height above ellipsoid for all input images.
        apply_geoid: True to add geoid height to DEM, false to
            skip. Necessary for DEMs that are measured from
            geoid. (Most are.)
        resampling_method: Resampling method to use for warping.
        error_threshold: Error threshold in pixels for gdalwarp.
        gdal_cachemax: Cache size to use for GDAL utilities.
        warp_memsize: Extra cache size for warping.
        warp_threads: Number of threads to use for warping.
        input_dir: Path to base input directory.
        output_dir: Path to base output directory.
        temp_dir: Path to scratch directory for intermediate files.

    """

    # Determine per-band pixel sizes
    min_gsd = min([info.avg_gsd for info in band_info.itervalues()])
    band_pixel_sizes = {}
    for (band, info) in band_info.iteritems():
        band_pixel_sizes[band] = pixel_size * round(info.avg_gsd / min_gsd)

    # Loop over bands
    dem_chip = None
    for (band, band_input_dir) in band_dirs.iteritems():
        logger.info("Processing part P%03d band %s" % (part_num, band))
        shp_filename = band_shps[band]
        imd_filename = band_info[band].imd_file
        xml_filename = os.path.splitext(imd_filename)[0] + ".XML"
        band_pixel_size = band_pixel_sizes[band]

        # Read in the shapefile
        with fiona.open(shp_filename, "r") as shp:
            recs = [shapely.geometry.shape(rec["geometry"]) for rec in shp]
            band_geom = shapely.ops.unary_union(recs)

        # Transform the geometry into the target SRS
        src_crs = fiona.crs.from_epsg(4326)
        dst_crs = fiona.crs.from_string(target_srs_proj4)
        band_geom_srs = shapely.geometry.mapping(band_geom)
        band_geom_srs = fiona.transform.transform_geom(src_crs, dst_crs,
                                                       band_geom_srs)
        band_geom_srs = shapely.geometry.shape(band_geom_srs)

        # If an AOI was provided, intersect it with the geometry to
        # subset the output image
        if aoi:
            # Create a shapely geometry representing the AOI. The
            # coordinates are in the target SRS already.
            aoi_geom = shapely.geometry.Polygon([
                (aoi[0], aoi[1]),  # LL
                (aoi[0], aoi[3]),  # UL
                (aoi[2], aoi[3]),  # UR
                (aoi[2], aoi[1]),  # LR
                (aoi[0], aoi[1])
            ])  # LL

            # Intersect the AOI with the full geometry. If there is no
            # intersection, there's no work to be done for this part.
            band_geom_srs = band_geom_srs.intersection(aoi_geom)
            if band_geom_srs.area == 0:
                logger.info("Part P%03d band %s does not intersect AOI" % \
                            (part_num, band))
                continue

        # Calculate the extents to use given the ortho grid
        # origin. This ensures that all images are orthorectified into
        # an aligned grid. Expand the geometry bounds to the nearest
        # pixel, then calculate the extents as pixel offsets from the
        # origin.
        bounds = band_geom_srs.bounds
        min_pix_x = math.floor((bounds[0] - grid_origin[0]) / band_pixel_size)
        min_pix_y = math.floor((bounds[1] - grid_origin[1]) / band_pixel_size)
        max_pix_x = math.ceil((bounds[2] - grid_origin[0]) / band_pixel_size)
        max_pix_y = math.ceil((bounds[3] - grid_origin[1]) / band_pixel_size)
        min_extent_x = grid_origin[0] + (min_pix_x * band_pixel_size)
        min_extent_y = grid_origin[1] + (min_pix_y * band_pixel_size)
        max_extent_x = grid_origin[0] + (max_pix_x * band_pixel_size)
        max_extent_y = grid_origin[1] + (max_pix_y * band_pixel_size)

        # Find all TIFs in the input directory
        tif_list = [
            os.path.join(band_input_dir, f) for f in os.listdir(band_input_dir)
            if f.lower().endswith(".tif")
        ]

        # Check whether DEM is available
        if rpc_dem is not None:
            # Get the DEM chip if it hasn't already been created
            if dem_chip is None:
                dem_chip = dem.local_dem_chip(os.path.basename(band_input_dir),
                                              temp_dir, band_geom, rpc_dem,
                                              apply_geoid, gdal_cachemax,
                                              warp_memsize, warp_threads,
                                              DEM_CHIP_MARGIN_DEG)

            # Orthorectify all TIFs in the input directory
            for input_file in tif_list:
                logger.info("Orthorectifying %s to SRS %s using pixel size %.10f" % \
                            (input_file, target_srs, band_pixel_size))

                # Get path of TIF relative to input_dir. This provides
                # the path below output_dir to use for the output
                # file.
                tif_rel_path = os.path.relpath(input_file, input_dir)
                output_file = __update_filename(
                    os.path.join(output_dir, tif_rel_path))
                output_file_dir = os.path.dirname(output_file)
                if not os.path.isdir(output_file_dir):
                    os.makedirs(output_file_dir)
                args = ["gdalwarp"]
                args += ["--config", "GDAL_CACHEMAX", str(gdal_cachemax)]
                args += ["-wm", str(warp_memsize)]
                args += ["-t_srs", str(target_srs)]
                args += ["-rpc"]
                args += [
                    "-te",
                    str(min_extent_x),
                    str(min_extent_y),
                    str(max_extent_x),
                    str(max_extent_y)
                ]
                args += ["-tr", str(band_pixel_size), str(band_pixel_size)]
                args += ["-r", str(resampling_method)]
                args += ["-et", str(error_threshold)]
                args += ["-multi"]
                args += ["-wo", "NUM_THREADS=%s" % warp_threads]
                args += ["-to", "RPC_DEM=%s" % dem_chip]
                args += ["-to", "RPC_DEMINTERPOLATION=bilinear"]
                args += ["-co", "TILED=YES"]
                args += [input_file]
                args += [output_file]
                run_cmd(args,
                        fail_msg="Failed to orthorectify %s using DEM %s" % \
                        (input_file, dem_chip),
                        cwd=temp_dir)

                # Copy the input file's IMD to the output
                # location. Also copy the corresponding XML file if it
                # exists.
                updated_imd_filename = __update_filename(
                    os.path.join(output_file_dir,
                                 os.path.basename(imd_filename)))
                shutil.copy(imd_filename, updated_imd_filename)
                __update_product_level(updated_imd_filename)
                if os.path.isfile(xml_filename):
                    updated_xml_filename = __update_filename(
                        os.path.join(output_file_dir,
                                     os.path.basename(xml_filename)))
                    shutil.copy(xml_filename, updated_xml_filename)
                    __update_product_level(updated_xml_filename)

        else:  # rpc_dem is None
            # Orthorectify all TIFs in the input directory using
            # average height above ellipsoid
            for input_file in tif_list:
                logger.info("Orthorectifying %s to SRS %s using pixel size %.10f" % \
                            (input_file, target_srs, band_pixel_size))

                # Get path of TIF relative to input_dir. This provides
                # the path below output_dir to use for the output
                # file.
                tif_rel_path = os.path.relpath(input_file, input_dir)
                output_file = __update_filename(
                    os.path.join(output_dir, tif_rel_path))
                output_file_dir = os.path.dirname(output_file)
                if not os.path.isdir(output_file_dir):
                    os.makedirs(output_file_dir)
                args = ["gdalwarp"]
                args += ["--config", "GDAL_CACHEMAX", str(gdal_cachemax)]
                args += ["-wm", str(warp_memsize)]
                args += ["-t_srs", str(target_srs)]
                args += ["-rpc"]
                args += [
                    "-te",
                    str(min_extent_x),
                    str(min_extent_y),
                    str(max_extent_x),
                    str(max_extent_y)
                ]
                args += ["-tr", str(band_pixel_size), str(band_pixel_size)]
                args += ["-r", str(resampling_method)]
                args += ["-et", str(error_threshold)]
                args += ["-multi"]
                args += ["-wo", "NUM_THREADS=%s" % warp_threads]
                args += ["-to", "RPC_HEIGHT=%s" % avg_hae]
                args += ["-co", "TILED=YES"]
                args += [input_file]
                args += [output_file]
                run_cmd(args,
                        fail_msg="Failed to orthorectify %s using average height %.10f" % \
                        (input_file, avg_hae),
                        cwd=temp_dir)

                # Copy the input file's IMD to the output
                # location. Also copy the corresponding XML file if it
                # exists.
                shutil.copy(
                    imd_filename,
                    __update_filename(
                        os.path.join(output_file_dir,
                                     os.path.basename(imd_filename))))
                if os.path.isfile(xml_filename):
                    shutil.copy(
                        xml_filename,
                        __update_filename(
                            os.path.join(output_file_dir,
                                         os.path.basename(xml_filename))))
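The extent calculation above snaps each band's bounds outward to whole pixels of the grid defined by grid_origin and band_pixel_size, so every part is orthorectified onto the same aligned grid. A standalone illustration with made-up numbers:

import math

grid_origin = (500000.0, 4400000.0)                   # hypothetical origin in target SRS units
band_pixel_size = 2.0                                 # hypothetical pixel size
bounds = (500003.3, 4400001.1, 500017.9, 4400010.7)   # minx, miny, maxx, maxy

min_pix_x = math.floor((bounds[0] - grid_origin[0]) / band_pixel_size)  # 1
min_pix_y = math.floor((bounds[1] - grid_origin[1]) / band_pixel_size)  # 0
max_pix_x = math.ceil((bounds[2] - grid_origin[0]) / band_pixel_size)   # 9
max_pix_y = math.ceil((bounds[3] - grid_origin[1]) / band_pixel_size)   # 6

# The resulting extents are whole-pixel offsets from the origin:
# (500002.0, 4400000.0) to (500018.0, 4400012.0).
print(grid_origin[0] + min_pix_x * band_pixel_size,
      grid_origin[1] + min_pix_y * band_pixel_size,
      grid_origin[0] + max_pix_x * band_pixel_size,
      grid_origin[1] + max_pix_y * band_pixel_size)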
Example No. 51
0
def test_loop(cmd, loop):
    for i in range(loop):
        print run_cmd.run_cmd(cmd)
Example No. 52
0
 def scan_object_for_functions(self,object):
     ace_dir=os.path.dirname(os.path.realpath(__file__))
     method_lister=os.path.join(ace_dir,"method_list")
     args = [method_lister,object];
     result = run_cmd(args,echo=False)
     return result.stdout
Example No. 53
0
def gdal_ortho(input_dir,
               output_dir,
               target_srs,
               pixel_size,
               aoi,
               bands,
               rpc_dem,
               use_hae,
               apply_geoid,
               resampling_method,
               error_threshold,
               create_vrts,
               gdal_cachemax,
               warp_memsize,
               warp_threads,
               num_parallel,
               tmpdir):
    """Wrapper for orthorectification using GDAL utilities.

    This script orthorectifies a DigitalGlobe 1B product using the
    provided RPCs. The input directory should contain a single 1B
    product.

    This script assumes that the necessary utilities are accessible
    from the execution environment.

    """

    # Create a working directory for temporary data
    if tmpdir is not None:
        tmpdir = os.path.realpath(tmpdir)
    temp_dir = tempfile.mkdtemp(dir=tmpdir)
    try:
        # Handle S3 input
        if aws.is_s3_url(input_dir):
            s3_input_prefix = input_dir
            if not s3_input_prefix.endswith("/"):
                s3_input_prefix += "/"
            input_dir = tempfile.mkdtemp(dir=temp_dir)
            run_cmd(["aws", "s3", "sync", s3_input_prefix, input_dir],
                    fail_msg="Failed to download input %s to %s" % \
                    (s3_input_prefix, input_dir),
                    cwd=input_dir)

        # Handle S3 output
        s3_output_prefix = None
        if aws.is_s3_url(output_dir):
            s3_output_prefix = output_dir
            if not s3_output_prefix.endswith("/"):
                s3_output_prefix += "/"
            output_dir = tempfile.mkdtemp(dir=temp_dir)

        # Fix paths
        input_dir = os.path.realpath(input_dir)
        output_dir = os.path.realpath(output_dir)
        if os.path.exists(rpc_dem):
            rpc_dem = os.path.realpath(rpc_dem)

        # Parse band list
        if bands is not None:
            bands_to_process = set(re.split(r"\s+|\s*,\s*", bands))
        else:
            bands_to_process = None

        # Walk the input directory to find IMD files and shapefiles
        imd_paths = {}
        shp_paths = {}
        for (path, dirs, files) in os.walk(input_dir, followlinks=True):
            # Look for GIS_FILES directory
            if os.path.basename(path).lower() == "gis_files":
                for f in files:
                    # Find PIXEL_SHAPE shapefiles
                    m_obj = re.search(r"^(.+)_pixel_shape.shp$", f, flags=re.IGNORECASE)
                    if m_obj is not None:
                        shp_paths[m_obj.group(1)] = os.path.join(path, f)
            else:
                # Look for IMDs
                for f in files:
                    if f.lower().endswith(".imd"):
                        imd_paths[os.path.splitext(f)[0]] = os.path.join(path, f)

        # Parse IMDs to determine the bands in each file
        imd_infos = defaultdict(list)
        for (base_name, imd_file) in imd_paths.iteritems():
            imd_info = __parse_imd(imd_file)
            if imd_info.band_id not in IMD_BAND_ALIASES:
                logger.warn("IMD file %s contains unknown bandId %s" % \
                            (imd_info.imd_file, imd_info.band_id))
            else:
                band_alias = IMD_BAND_ALIASES[imd_info.band_id]
                if bands_to_process is not None and \
                   band_alias not in bands_to_process:
                    logger.info("Skipping %s (%s)" % (base_name, band_alias))
                else:
                    # This IMD belongs to an image to be processed.
                    # Group by basename without respect to the bands
                    # in the filename (i.e. group corresponding P1BS
                    # and M1BS together).
                    imd_infos[__get_general_basename(base_name)].append(imd_info)
        logger.info("Found %d images to orthorectify" % len(imd_infos))

        # Load all the shapefiles into one big geometry
        geoms = []
        for shp_filename in shp_paths.itervalues():
            with fiona.open(shp_filename, "r") as shp:
                geoms += [shapely.geometry.shape(rec["geometry"]) for rec in shp]
        full_geom = shapely.ops.unary_union(geoms)

        # Handle special "UTM" target SRS
        utm_epsg_code = None
        if target_srs.lower() == "utm":
            utm_epsg_code = __get_utm_epsg_code(full_geom.centroid.y,
                                                full_geom.centroid.x)
            target_srs = "EPSG:%d" % utm_epsg_code
            logger.info("UTM target SRS is %s" % target_srs)

        # Create a PROJ.4 string of the target SRS for easy fiona calls
        try:
            srs = osr.SpatialReference()
            srs.SetFromUserInput(str(target_srs))
            target_srs_proj4 = srs.ExportToProj4()
        finally:
            srs = None

        # Transform the full geometry into the target SRS. Its
        # bounding box defines the origin of the grid that each TIF
        # should be orthorectified into.
        src_crs = fiona.crs.from_epsg(4326)
        dst_crs = fiona.crs.from_string(target_srs_proj4)
        full_geom_srs = shapely.geometry.mapping(full_geom)
        full_geom_srs = fiona.transform.transform_geom(src_crs,
                                                       dst_crs,
                                                       full_geom_srs)
        full_geom_srs = shapely.geometry.shape(full_geom_srs)
        grid_origin = full_geom_srs.bounds[0:2]
        logger.info("Ortho grid origin: %.10f, %.10f" % \
                    (grid_origin[0], grid_origin[1]))

        # Check whether pixel_size needs to be calculated
        if pixel_size is None:
            # Loop over all the image info and find the best
            # (smallest) GSD. This will be used to define the pixel
            # size in the target SRS.
            min_gsd = min([imd_info.avg_gsd
                           for imd_info_list in imd_infos.itervalues()
                           for imd_info in imd_info_list])
            logger.info("Best input GSD is %.10f" % min_gsd)

            # Get the UTM zone to use
            if utm_epsg_code is None:
                utm_epsg_code = __get_utm_epsg_code(full_geom.centroid.y,
                                                    full_geom.centroid.x)

            # Transform the full geometry's centroid into UTM
            src_crs = fiona.crs.from_epsg(4326)
            dst_crs = fiona.crs.from_epsg(utm_epsg_code)
            pt = shapely.geometry.mapping(full_geom.centroid)
            pt = fiona.transform.transform_geom(src_crs, dst_crs, pt)
            pt = shapely.geometry.shape(pt)

            # Add the best GSD to define a square in UTM space
            pix = shapely.geometry.box(pt.x, pt.y, pt.x + min_gsd, pt.y + min_gsd)

            # Transform the pixel box into the target SRS
            src_crs = dst_crs
            dst_crs = fiona.crs.from_string(target_srs_proj4)
            pix = shapely.geometry.mapping(pix)
            pix = fiona.transform.transform_geom(src_crs, dst_crs, pix)
            pix = shapely.geometry.shape(pix)

            # Use the smaller dimension from the bounding box of the
            # transformed pixel as the pixel size. The larger
            # dimension will just end up being slightly oversampled,
            # so no data is lost.
            bounds = pix.bounds
            pixel_size = min(abs(bounds[2] - bounds[0]),
                             abs(bounds[3] - bounds[1]))
            logger.info("Calculated pixel size in target SRS is %.10f" % pixel_size)

        # Find average height above ellipsoid over all parts
        hae_vals = [imd_info.avg_hae
                    for imd_info_list in imd_infos.itervalues()
                    for imd_info in imd_info_list]
        if hae_vals:
            avg_hae = sum(hae_vals) / len(hae_vals)
        else:
            avg_hae = 0.0
        logger.info("Average height above ellipsoid is %.10f" % avg_hae)

        # Create a pool of worker threads. Each worker thread will
        # call out to GDAL utilities to do actual work.
        worker_pool = ThreadPoolExecutorWithCallback(max_workers=num_parallel)

        # Check whether to download DEM data from S3
        if not use_hae and not os.path.exists(rpc_dem):
            # The use_hae flag is not set and the DEM path doesn't
            # exist locally. Assume it is an S3 path.
            dem_vrt = os.path.join(temp_dir, "s3dem_" + str(uuid.uuid4()) + ".vrt")
            if dem.download_tiles(rpc_dem,
                                  full_geom.buffer(DEM_FETCH_MARGIN_DEG),
                                  dem_vrt):
                logger.info("Downloaded DEM tiles, using VRT %s" % dem_vrt)
                rpc_dem = dem_vrt
            else:
                logger.warn("Failed to download DEM tiles from S3, reverting to "
                            "average height above ellipsoid")
                use_hae = True

        # Loop over images and submit jobs to worker pool
        for (gen_base_name, imd_info_list) in imd_infos.iteritems():
            # Get inputs by band
            band_info = {}
            band_shps = {}
            for imd_info in imd_info_list:
                imd_base_name = os.path.splitext(os.path.basename(imd_info.imd_file))[0]
                if imd_base_name not in shp_paths:
                    logger.warn("Base name %s missing from GIS_FILES" % imd_base_name)
                else:
                    band_info[imd_info.band_id] = imd_info
                    band_shps[imd_info.band_id] = shp_paths[imd_base_name]

            # Submit job
            worker_pool.submit(worker_thread,
                               gen_base_name,
                               band_info,
                               band_shps,
                               target_srs,
                               target_srs_proj4,
                               grid_origin,
                               pixel_size,
                               aoi,
                               rpc_dem,
                               avg_hae,
                               apply_geoid,
                               resampling_method,
                               error_threshold,
                               gdal_cachemax,
                               warp_memsize,
                               warp_threads,
                               input_dir,
                               output_dir,
                               temp_dir)

        # Wait for all workers to finish
        canceled = False
        try:
            while worker_pool.is_running():
                time.sleep(1)
        except KeyboardInterrupt:
            canceled = True
            logger.warn("Received interrupt, canceling pending jobs...")
            num_canceled = worker_pool.cancel_all()
            logger.warn("Canceled %d pending jobs" % num_canceled)
            time.sleep(1)

        # Wait for workers to finish
        worker_pool.shutdown()

        # Create VRTs if requested
        if create_vrts and not canceled:
            # Walk output directory looking for TIFs
            tifs_by_band = defaultdict(list)
            for (path, dirs, files) in os.walk(output_dir, followlinks=True):
                for f in files:
                    m_obj = re.search(r"\w+-(\w)\w+-\w+_p\d+.tif$", f, flags=re.IGNORECASE)
                    if m_obj is not None:
                        band_char = m_obj.group(1)
                        if band_char not in FILENAME_BAND_ALIASES:
                            logger.warn("Output TIF filename %s contains unknown band character %s" % \
                                        (f, band_char))
                        else:
                            band_alias = FILENAME_BAND_ALIASES[band_char]
                            tifs_by_band[band_alias].append(os.path.join(path, f))

            # Create a VRT for each band
            for (band_alias, tif_list) in tifs_by_band.iteritems():
                # Use the first TIF's name
                m_obj = re.search(r"(.+)_p\d+.tif$",
                                  os.path.basename(tif_list[0]),
                                  flags=re.IGNORECASE)
                if m_obj is not None:
                    vrt_name = m_obj.group(1) + ".vrt"
                else:
                    vrt_name = "ortho_%s.vrt" % band_alias

                # Get relative paths to files from the output directory
                relpaths = [os.path.relpath(f, output_dir) for f in tif_list]

                # Create VRT (paths are relative to output_dir)
                logger.info("Creating band %s VRT %s" % (band_alias, vrt_name))
                run_cmd(["gdalbuildvrt",
                         "-srcnodata",
                         "0",
                         vrt_name] + relpaths,
                        fail_msg="Failed to create band %s VRT %s" % (band_alias, vrt_name),
                        cwd=output_dir)

        # Stage output to S3 if necessary
        if s3_output_prefix is not None:
            run_cmd(["aws", "s3", "sync", output_dir, s3_output_prefix],
                    fail_msg="Failed to upload output %s to %s" % \
                    (output_dir, s3_output_prefix),
                    cwd=output_dir)

    finally:
        # Delete the temporary directory and its contents
        shutil.rmtree(temp_dir)
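aws.is_s3_url() drives the S3 input/output handling above, but its module is not reproduced here. A minimal sketch, assuming it only checks the URL scheme, could be:

def is_s3_url(url):
    # Sketch only: treat anything starting with "s3://" as an S3 URL.
    return url is not None and url.lower().startswith("s3://")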
Example No. 54
0
def worker_thread(base_name,
                  band_info,
                  band_shps,
                  target_srs,
                  target_srs_proj4,
                  grid_origin,
                  pixel_size,
                  aoi,
                  rpc_dem,
                  avg_hae,
                  apply_geoid,
                  resampling_method,
                  error_threshold,
                  gdal_cachemax,
                  warp_memsize,
                  warp_threads,
                  input_dir,
                  output_dir,
                  temp_dir):
    """Orthorectifies a 1B part using GDAL utilities.

    Args:
        base_name: Name of the image(s) to be processed.
        band_info: Tuples generated from __parse_imd for each band in
            the part.
        band_shps: Paths to shapefiles for each band in the part.
        target_srs: Spatial reference system to warp into.
        target_srs_proj4: PROJ.4 string of the target SRS.
        grid_origin: Tuple containing origin x and y of the ortho grid.
        pixel_size: Requested pixel size in target SRS units.
        aoi: Tuple containing min x, min y, max x, and max y bounds of
            AOI to orthorectify in target SRS units.
        rpc_dem: Path to DEM to use for warping.
        avg_hae: Average height above ellipsoid for all input images.
        apply_geoid: True to add geoid height to DEM, false to
            skip. Necessary for DEMs that are measured from
            geoid. (Most are.)
        resampling_method: Resampling method to use for warping.
        error_threshold: Error threshold in pixels for gdalwarp.
        gdal_cachemax: Cache size to use for GDAL utilities.
        warp_memsize: Extra cache size for warping.
        warp_threads: Number of threads to use for warping.
        input_dir: Path to base input directory.
        output_dir: Path to base output directory.
        temp_dir: Path to scratch directory for intermediate files.

    """

    # Determine per-band pixel sizes
    min_gsd = min([info.avg_gsd for info in band_info.itervalues()])
    band_pixel_sizes = {}
    for (band, info) in band_info.iteritems():
        band_pixel_sizes[band] = pixel_size * round(info.avg_gsd / min_gsd)

    # Loop over bands
    dem_chip = None
    for (band, info) in band_info.iteritems():
        logger.info("Processing %s %s" % (base_name, band))
        shp_filename = band_shps[band]
        imd_filename = info.imd_file
        xml_filename = os.path.splitext(imd_filename)[0] + ".XML"
        band_pixel_size = band_pixel_sizes[band]

        # Read in the shapefile
        with fiona.open(shp_filename, "r") as shp:
            recs = [shapely.geometry.shape(rec["geometry"]) for rec in shp]
            band_geom = shapely.ops.unary_union(recs)

        # Transform the geometry into the target SRS
        src_crs = fiona.crs.from_epsg(4326)
        dst_crs = fiona.crs.from_string(target_srs_proj4)
        band_geom_srs = shapely.geometry.mapping(band_geom)
        band_geom_srs = fiona.transform.transform_geom(src_crs,
                                                       dst_crs,
                                                       band_geom_srs)
        band_geom_srs = shapely.geometry.shape(band_geom_srs)

        # If an AOI was provided, intersect it with the geometry to
        # subset the output image
        if aoi:
            # Create a shapely geometry representing the AOI. The
            # coordinates are in the target SRS already.
            aoi_geom = shapely.geometry.Polygon([(aoi[0], aoi[1]),  # LL
                                                 (aoi[0], aoi[3]),  # UL
                                                 (aoi[2], aoi[3]),  # UR
                                                 (aoi[2], aoi[1]),  # LR
                                                 (aoi[0], aoi[1])]) # LL

            # Intersect the AOI with the full geometry. If there is no
            # intersection, there's no work to be done for this part.
            band_geom_srs = band_geom_srs.intersection(aoi_geom)
            if band_geom_srs.area == 0:
                logger.info("%s %s does not intersect AOI" % \
                            (base_name, band))
                continue

        # Calculate the extents to use given the ortho grid
        # origin. This ensures that all images are orthorectified into
        # an aligned grid. Expand the geometry bounds to the nearest
        # pixel, then calculate the extents as pixel offsets from the
        # origin.
        bounds = band_geom_srs.bounds
        min_pix_x = math.floor((bounds[0] - grid_origin[0]) / band_pixel_size)
        min_pix_y = math.floor((bounds[1] - grid_origin[1]) / band_pixel_size)
        max_pix_x = math.ceil((bounds[2] - grid_origin[0]) / band_pixel_size)
        max_pix_y = math.ceil((bounds[3] - grid_origin[1]) / band_pixel_size)
        min_extent_x = grid_origin[0] + (min_pix_x * band_pixel_size)
        min_extent_y = grid_origin[1] + (min_pix_y * band_pixel_size)
        max_extent_x = grid_origin[0] + (max_pix_x * band_pixel_size)
        max_extent_y = grid_origin[1] + (max_pix_y * band_pixel_size)

        # Find the TIF corresponding to the IMD
        input_dir = os.path.dirname(imd_filename)
        imd_basename = os.path.splitext(os.path.basename(imd_filename))[0]
        tif_list = [os.path.join(input_dir, f)
                    for f in os.listdir(input_dir)
                    if f.lower().endswith(".tif") and \
                    os.path.splitext(os.path.basename(f))[0] == imd_basename]
        if len(tif_list) != 1:
            logger.warn("Found %d TIFs corresponding to IMD file %s, expected 1" % \
                        (len(tif_list), imd_filename))
        else:
            tif_filename = tif_list[0]

            # Check whether DEM is available
            if os.path.exists(rpc_dem):
                # Get the DEM chip if it hasn't already been created
                if dem_chip is None:
                    dem_chip = dem.local_dem_chip(base_name,
                                                  temp_dir,
                                                  band_geom,
                                                  rpc_dem,
                                                  apply_geoid,
                                                  gdal_cachemax,
                                                  warp_memsize,
                                                  warp_threads,
                                                  DEM_CHIP_MARGIN_DEG)

                # Orthorectify TIF
                logger.info("Orthorectifying %s to SRS %s using pixel size %.10f" % \
                            (tif_filename, target_srs, band_pixel_size))

                # Get path of TIF relative to input_dir. This provides
                # the path below output_dir to use for the output
                # file.
                tif_rel_path = os.path.relpath(tif_filename, input_dir)
                output_file = __update_filename(os.path.join(output_dir, tif_rel_path))
                output_file_dir = os.path.dirname(output_file)
                if not os.path.isdir(output_file_dir):
                    os.makedirs(output_file_dir)
                args = ["gdalwarp"]
                args += ["--config", "GDAL_CACHEMAX", str(gdal_cachemax)]
                args += ["-wm", str(warp_memsize)]
                args += ["-t_srs", str(target_srs)]
                args += ["-rpc"]
                args += ["-te", str(min_extent_x), str(min_extent_y), str(max_extent_x), str(max_extent_y)]
                args += ["-tr", str(band_pixel_size), str(band_pixel_size)]
                args += ["-r", str(resampling_method)]
                args += ["-et", str(error_threshold)]
                args += ["-multi"]
                args += ["-wo", "NUM_THREADS=%s" % warp_threads]
                args += ["-to", "RPC_DEM=%s" % dem_chip]
                args += ["-to", "RPC_DEMINTERPOLATION=bilinear"]
                args += ["-co", "TILED=YES"]
                args += [tif_filename]
                args += [output_file]
                run_cmd(args,
                        fail_msg="Failed to orthorectify %s using DEM %s" % \
                        (tif_filename, dem_chip),
                        cwd=temp_dir)

                # Copy the input file's IMD to the output
                # location. Also copy the corresponding XML file if it
                # exists.
                updated_imd_filename = __update_filename(os.path.join(output_file_dir,
                                                                      os.path.basename(imd_filename)))
                shutil.copy(imd_filename, updated_imd_filename)
                __update_product_level(updated_imd_filename)
                if os.path.isfile(xml_filename):
                    updated_xml_filename = __update_filename(os.path.join(output_file_dir,
                                                                          os.path.basename(xml_filename)))
                    shutil.copy(xml_filename, updated_xml_filename)
                    __update_product_level(updated_xml_filename)

            else: # rpc_dem does not exist
                # Orthorectify TIF using average height above ellipsoid
                logger.info("Orthorectifying %s to SRS %s using pixel size %.10f" % \
                            (tif_filename, target_srs, band_pixel_size))

                # Get path of TIF relative to input_dir. This provides
                # the path below output_dir to use for the output
                # file.
                tif_rel_path = os.path.relpath(tif_filename, input_dir)
                output_file = __update_filename(os.path.join(output_dir, tif_rel_path))
                output_file_dir = os.path.dirname(output_file)
                if not os.path.isdir(output_file_dir):
                    os.makedirs(output_file_dir)
                args = ["gdalwarp"]
                args += ["--config", "GDAL_CACHEMAX", str(gdal_cachemax)]
                args += ["-wm", str(warp_memsize)]
                args += ["-t_srs", str(target_srs)]
                args += ["-rpc"]
                args += ["-te", str(min_extent_x), str(min_extent_y), str(max_extent_x), str(max_extent_y)]
                args += ["-tr", str(band_pixel_size), str(band_pixel_size)]
                args += ["-r", str(resampling_method)]
                args += ["-et", str(error_threshold)]
                args += ["-multi"]
                args += ["-wo", "NUM_THREADS=%s" % warp_threads]
                args += ["-to", "RPC_HEIGHT=%s" % avg_hae]
                args += ["-co", "TILED=YES"]
                args += [tif_filename]
                args += [output_file]
                run_cmd(args,
                        fail_msg="Failed to orthorectify %s using average height %.10f" % \
                        (tif_filename, avg_hae),
                        cwd=temp_dir)

                # Copy the input file's IMD to the output
                # location. Also copy the corresponding XML file if it
                # exists.
                shutil.copy(imd_filename,
                            __update_filename(os.path.join(output_file_dir,
                                                           os.path.basename(imd_filename))))
                if os.path.isfile(xml_filename):
                    shutil.copy(xml_filename,
                                __update_filename(os.path.join(output_file_dir,
                                                               os.path.basename(xml_filename))))
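The per-band pixel size logic at the top of this worker_thread scales the requested pixel size by the rounded ratio of each band's GSD to the finest GSD, so coarser bands stay on an integer multiple of the finest grid. With made-up GSDs:

pixel_size = 0.5                        # requested pixel size for the finest band (hypothetical)
avg_gsds = {"PAN": 0.48, "MS1": 1.93}   # hypothetical per-band average GSDs
min_gsd = min(avg_gsds.values())        # 0.48

band_pixel_sizes = {}
for band, gsd in avg_gsds.items():
    band_pixel_sizes[band] = pixel_size * round(gsd / min_gsd)

# PAN: 0.5 * round(1.00) = 0.5; MS1: 0.5 * round(4.02) = 2.0
print(band_pixel_sizes)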
Example No. 55
0
def test_once(cmd):
    print run_cmd.run_cmd(cmd)
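The run_cmd() helper itself is not reproduced anywhere on this page, and the examples above come from different projects, so their wrappers do not share one signature. A minimal sketch matching the run_cmd(args, fail_msg=..., cwd=...) style of the GDAL examples, built on subprocess, could be:

import subprocess

def run_cmd(args, fail_msg=None, cwd=None):
    # Sketch only: run an argument-list command, capture its output,
    # and raise RuntimeError (with fail_msg if given) on a nonzero exit code.
    proc = subprocess.Popen(args, cwd=cwd, stdout=subprocess.PIPE,
                            stderr=subprocess.STDOUT, universal_newlines=True)
    output, _ = proc.communicate()
    if proc.returncode != 0:
        message = fail_msg or "Command failed: %s" % " ".join(args)
        raise RuntimeError("%s (exit code %d)\n%s" % (message, proc.returncode, output))
    return output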