def initialiseApp(settingsModule, user, password, email, pythonPath=None, cacheTable=None):
    syncdbEvents = {
        "yes/no": "yes\n",
        "Username": "******" % user,
        "E-mail address": "%s\n" % email,
        "Password": "******" % password,
    }
    system.runExpect("django-admin.py %s --settings='%s' syncdb"
                     % (pythonPath and "--pythonpath='%s'" % (pythonPath) or "", settingsModule),
                     events=syncdbEvents, timeout=60)

    # Create cache table
    if cacheTable:
        # XXX: really we should not run this if the db file has content.
        try:
            system.run(
                "django-admin.py %s --settings='%s' createcachetable %s"
                % (pythonPath and "--pythonpath='%s'" % (pythonPath) or "", settingsModule, cacheTable))
        except:
            pass

def process_kernels(self, path, parameters):
    if PAR.SMOOTH > 0.:
        system.run('solver', 'smooth',
                   hosts='head',
                   path=path + '/' + 'kernels/sum',
                   span=PAR.SMOOTH,
                   parameters=parameters)

def configure():
    debugOutput("Configuring")

    # Set mysql admin password - it is okay for this to fail if the password has been set already.
    system.run(
        """echo "set password for root@localhost=PASSWORD('%s');" | mysql -u root""" % DB_ROOT_PASSWORD,
        exitOnFail=False)

def convert_to_las(self, _file, _file_out, json_file):
    if not self.projection.srs:
        log.ODM_ERROR('Empty CRS: Could not convert to LAS')
        return

    kwargs = {'bin': context.pdal_path,
              'f_in': _file,
              'f_out': _file_out,
              'east': self.utm_east_offset,
              'north': self.utm_north_offset,
              'srs': self.projection.srs,
              'json': json_file}

    # create pipeline file transform.xml to enable transformation
    pipeline = '{{' \
               '  "pipeline":[' \
               '    "untransformed.ply",' \
               '    {{' \
               '      "a_srs":"{srs}",' \
               '      "offset_x":"{east}",' \
               '      "offset_y":"{north}",' \
               '      "offset_z":"0",' \
               '      "filename":"transformed.las"' \
               '    }}' \
               '  ]' \
               '}}'.format(**kwargs)

    with open(json_file, 'w') as f:
        f.write(pipeline)

    # call pdal
    system.run('{bin}/pdal pipeline -i {json} --readers.ply.filename={f_in} '
               '--writers.las.filename={f_out}'.format(**kwargs))

def startServer(settingsModule, host, port, pythonPath=None, block=False):
    command = "django-admin.py %s --settings='%s' runserver %s:%s" % (
        pythonPath and "--pythonpath='%s'" % (pythonPath) or "",
        settingsModule, host, port)
    if not block:
        command += " 1>/dev/null 2>/dev/null &"
    system.run(command)

def evaluate_function(self):
    """ Performs forward simulation to evaluate objective function """
    self.write_model(path=PATH.FUNC, suffix='try')

    system.run('solver', 'eval_func',
               hosts='all',
               path=PATH.FUNC)

    self.sum_residuals(path=PATH.FUNC, suffix='try')

def initialize(self):
    """ Prepares for next model update iteration """
    self.write_model(path=PATH.GRAD, suffix='new')

    print 'Generating synthetics'
    system.run('solver', 'eval_func',
               hosts='all',
               path=PATH.GRAD)

    self.sum_residuals(path=PATH.GRAD, suffix='new')

def evaluate_function(self):
    """ Calls forward solver and writes misfit """
    self.prepare_model(path=PATH.FUNC, suffix='try')

    # forward simulation
    system.run('solver', 'eval_func',
               hosts='all',
               path=PATH.FUNC)

    self.sum_residuals(path=PATH.FUNC, suffix='try')

def evaluate_gradient(self):
    """ Calls adjoint solver and runs process_kernels """
    # adjoint simulation
    system.run('solver', 'eval_grad',
               hosts='all',
               path=PATH.GRAD,
               export_traces=divides(self.iter, PAR.SAVETRACES))

    postprocess.process_kernels(
        path=PATH.GRAD)

def convert_to_las(self, _file, pdalXML):
    if not self.epsg:
        log.ODM_ERROR('Empty EPSG: Could not convert to LAS')
        return

    kwargs = {
        'bin': context.pdal_path,
        'f_in': _file,
        'f_out': _file + '.las',
        'east': self.utm_east_offset,
        'north': self.utm_north_offset,
        'epsg': self.epsg,
        'xml': pdalXML
    }

    # call txt2las
    # system.run('{bin}/txt2las -i {f_in} -o {f_out} -skip 30 -parse xyzRGBssss ' \
    #            '-set_scale 0.01 0.01 0.01 -set_offset {east} {north} 0 ' \
    #            '-translate_xyz 0 -epsg {epsg}'.format(**kwargs))

    # create pipeline file transform.xml to enable transformation
    pipelineXml = '<?xml version=\"1.0\" encoding=\"utf-8\"?>'
    pipelineXml += '<Pipeline version=\"1.0\">'
    pipelineXml += '  <Writer type=\"writers.las\">'
    pipelineXml += '    <Option name=\"filename\">'
    pipelineXml += '      transformed.las'
    pipelineXml += '    </Option>'
    pipelineXml += '    <Option name=\"a_srs\">'
    pipelineXml += '      EPSG:{epsg}'.format(**kwargs)
    pipelineXml += '    </Option>'
    pipelineXml += '    <Filter type=\"filters.transformation\">'
    pipelineXml += '      <Option name=\"matrix\">'
    pipelineXml += '        1 0 0 {east}'.format(**kwargs)
    pipelineXml += '        0 1 0 {north}'.format(**kwargs)
    pipelineXml += '        0 0 1 0'
    pipelineXml += '        0 0 0 1'
    pipelineXml += '      </Option>'
    pipelineXml += '      <Reader type=\"readers.ply\">'
    pipelineXml += '        <Option name=\"filename\">'
    pipelineXml += '          untransformed.ply'
    pipelineXml += '        </Option>'
    pipelineXml += '      </Reader>'
    pipelineXml += '    </Filter>'
    pipelineXml += '  </Writer>'
    pipelineXml += '</Pipeline>'

    with open(pdalXML, 'w') as f:
        f.write(pipelineXml)

    # call pdal
    system.run(
        '{bin}/pdal pipeline -i {xml} --readers.ply.filename={f_in} '
        '--writers.las.filename={f_out}'.format(**kwargs))

def main(self):
    """ Generates data """
    # clean directories
    self.clean_directory(PATH.OUTPUT)
    self.clean_directory(PATH.GLOBAL)

    print('Generating data...')
    #unix.mkdir(PATH.DATA)
    system.run('solver', 'setup', hosts='all')

    print('Finished')

def main(self):
    """ Generates seismic data """
    print 'Running solver...'

    system.run('solver', 'generate_data',
               hosts='all',
               model_path=PATH.MODEL,
               model_type='gll',
               model_name='model')

    print "Finished\n"

def setup(self):
    """ Lays groundwork for inversion """
    # clean scratch directories
    if PAR.BEGIN == 1:
        unix.rm(PATH.GLOBAL)
        unix.mkdir(PATH.GLOBAL)

    preprocess.setup()
    postprocess.setup()
    optimize.setup()

    system.run('solver', 'setup', hosts='all')

def install() :
    debugOutput("Installing")
    system.run("sudo easy_install -f http://www.nickblundell.org.uk/packages --upgrade wikidbase")

    # Create a suitable settings file and a place for the db with www-data access
    uploadsPath = os.path.join(WB_PATH, "files")
    # XXX: pops up no module named wikidbase
    wbSettings = getSettingsData()
    settingsFile = os.path.join(WB_PATH, "settings.py")
    databasePath = os.path.join(WB_PATH, "wbdata.db")
    cacheTable = "wbcache"

    # XXX: Adding these to end will cause settings.py logic to fail.
    # Set settings.
    wbSettings += """
DATABASE_ENGINE = "sqlite3"
DATABASE_NAME="%s"
CACHE_BACKEND="db://%s"
UPLOAD_FOLDER="%s"
""" % (databasePath, cacheTable, uploadsPath)

    # Create folders and set permissions.
    system.run("mkdir -p '%s'" % uploadsPath)
    system.writeToFile(settingsFile, wbSettings)
    system.run("chgrp -R www-data '%s'" % WB_PATH)
    system.run("chmod -R g+rwx '%s'" % WB_PATH)

    # Initialise app
    sbdjango.initialiseApp("settings", DEFAULT_ADMIN_CREDENTIALS[0], DEFAULT_ADMIN_CREDENTIALS[1],
                           DEFAULT_ADMIN_CREDENTIALS[2], pythonPath=WB_PATH, cacheTable=cacheTable)

    # Do mod_python setup
    apt.install("libapache2-mod-python")

def initialize(self):
    """ Prepares for next model update iteration """
    isready = self.solver_status()
    if not isready:
        print 'Generating synthetics'

        self.prepare_model(path=PATH.GRAD, suffix='new')

        system.run('solver', 'eval_func',
                   hosts='all',
                   path=PATH.GRAD)

        self.sum_residuals(path=PATH.GRAD, suffix='new')

def evaluate_gradient(self):
    """ Performs adjoint simulation to evaluate gradient """
    system.run('solver', 'eval_grad',
               hosts='all',
               path=PATH.GRAD,
               export_traces=divides(optimize.iter, PAR.SAVETRACES))

    postprocess.write_gradient(
        path=PATH.GRAD)

    src = join(PATH.GRAD, 'gradient')
    dst = join(PATH.OPTIMIZE, 'g_new')
    savenpy(dst, solver.merge(solver.load(src, suffix='_kernel')))

def convert_to_dem(self, _file, _file_out, pdalJSON, sample_radius, gdal_res, gdal_radius):
    # Check if exists f_in
    if not io.file_exists(_file):
        log.ODM_ERROR('LAS file does not exist')
        return False

    kwargs = {
        'bin': context.pdal_path,
        'f_in': _file,
        'sample_radius': sample_radius,
        'gdal_res': gdal_res,
        'gdal_radius': gdal_radius,
        'f_out': _file_out,
        'json': pdalJSON
    }

    pipelineJSON = '{{' \
                   '  "pipeline":[' \
                   '    "input.las",' \
                   '    {{' \
                   '      "type":"filters.sample",' \
                   '      "radius":"{sample_radius}"' \
                   '    }},' \
                   '    {{' \
                   '      "type":"filters.pmf"' \
                   '    }},' \
                   '    {{' \
                   '      "type":"filters.range",' \
                   '      "limits":"Classification[2:2]"' \
                   '    }},' \
                   '    {{' \
                   '      "resolution": {gdal_res},' \
                   '      "radius": {gdal_radius},' \
                   '      "output_type":"idw",' \
                   '      "filename":"outputfile.tif"' \
                   '    }}' \
                   '  ]' \
                   '}}'.format(**kwargs)

    with open(pdalJSON, 'w') as f:
        f.write(pipelineJSON)

    system.run('{bin}/pdal pipeline {json} --readers.las.filename={f_in} '
               '--writers.gdal.filename={f_out}'.format(**kwargs))

    if io.file_exists(kwargs['f_out']):
        return True
    else:
        return False

def evaluate_gradient(self):
    """ Performs adjoint simulation to evaluate gradient """
    system.run('solver', 'eval_grad',
               hosts='all',
               path=PATH.GRAD,
               export_traces=divides(optimize.iter, PAR.SAVETRACES))

    postprocess.write_gradient(path=PATH.GRAD)

    src = join(PATH.GRAD, 'gradient')
    dst = join(PATH.OPTIMIZE, 'g_new')
    savenpy(dst, solver.merge(solver.load(src, suffix='_kernel')))

def main(self):
    """ Generates seismic data """
    print 'Running solver...'

    system.run('solver', 'generate_data',
               hosts='all',
               model_path=PATH.MODEL,
               model_type='gll',
               model_name='model')

    print "Finished"

def compute_gradient(self):
    """ Compute gradients. Designed to avoid excessive storage of boundary files. """
    # output for inversion history
    unix.mkdir(join(PATH.OUTPUT, iter_dirname(optimize.iter)))

    # compute gradients
    system.run('solver', 'compute_gradient', hosts='all')
    postprocess.write_gradient(PATH.GRAD)

    # evaluate misfit function
    self.sum_residuals(path=PATH.SOLVER, suffix='new')

def install() :
    debugOutput("Installing")

    if apt.isInstalled("apache2") :
        debugOutput("apache2 is installed")

    interface.updateProgress("Installing apache2")
    apt.install("apache2")

    # Enable mod_proxy.
    system.run("a2enmod proxy", exitOnFail=False)
    system.run("a2enmod proxy_http", exitOnFail=False)

    # Disable default apache site.
    setSiteEnabled("default",False)

def install():
    debugOutput("Installing")

    if apt.isInstalled("apache2"):
        debugOutput("apache2 is installed")

    interface.updateProgress("Installing apache2")
    apt.install("apache2")

    # Enable mod_proxy.
    system.run("a2enmod proxy", exitOnFail=False)
    system.run("a2enmod proxy_http", exitOnFail=False)

    # Disable default apache site.
    setSiteEnabled("default", False)

def get_run_output(command: str):
    import tempfile
    temp_filename = tempfile.mktemp('.temp')
    del tempfile

    run(command, temp_filename)

    with open(temp_filename) as f:
        file_data = f.read()
        f.close()

    os.remove(temp_filename)
    return file_data

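# Hedged sketch, not part of the snippet above: tempfile.mktemp only returns a name and is
# race-prone, so a variant built on tempfile.mkstemp creates the temporary file atomically.
# It assumes the same `run(command, output_path)` helper that writes the command's output to
# the given path; the function name get_run_output_safe is hypothetical.
import os
import tempfile


def get_run_output_safe(command: str) -> str:
    fd, temp_filename = tempfile.mkstemp('.temp')
    os.close(fd)  # only the path is needed; run() reopens the file itself
    try:
        run(command, temp_filename)
        with open(temp_filename) as f:
            return f.read()
    finally:
        os.remove(temp_filename)
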
def convert_to_las(self, _file, pdalXML):
    if not self.epsg:
        log.ODM_ERROR('Empty EPSG: Could not convert to LAS')
        return

    kwargs = {'bin': context.pdal_path,
              'f_in': _file,
              'f_out': _file + '.las',
              'east': self.utm_east_offset,
              'north': self.utm_north_offset,
              'epsg': self.epsg,
              'xml': pdalXML}

    # call txt2las
    # system.run('{bin}/txt2las -i {f_in} -o {f_out} -skip 30 -parse xyzRGBssss ' \
    #            '-set_scale 0.01 0.01 0.01 -set_offset {east} {north} 0 ' \
    #            '-translate_xyz 0 -epsg {epsg}'.format(**kwargs))

    # create pipeline file transform.xml to enable transformation
    pipelineXml = '<?xml version=\"1.0\" encoding=\"utf-8\"?>'
    pipelineXml += '<Pipeline version=\"1.0\">'
    pipelineXml += '  <Writer type=\"writers.las\">'
    pipelineXml += '    <Option name=\"filename\">'
    pipelineXml += '      transformed.las'
    pipelineXml += '    </Option>'
    pipelineXml += '    <Filter type=\"filters.transformation\">'
    pipelineXml += '      <Option name=\"matrix\">'
    pipelineXml += '        1 0 0 {east}'.format(**kwargs)
    pipelineXml += '        0 1 0 {north}'.format(**kwargs)
    pipelineXml += '        0 0 1 0'
    pipelineXml += '        0 0 0 1'
    pipelineXml += '      </Option>'
    pipelineXml += '      <Reader type=\"readers.ply\">'
    pipelineXml += '        <Option name=\"filename\">'
    pipelineXml += '          untransformed.ply'
    pipelineXml += '        </Option>'
    pipelineXml += '      </Reader>'
    pipelineXml += '    </Filter>'
    pipelineXml += '  </Writer>'
    pipelineXml += '</Pipeline>'

    with open(pdalXML, 'w') as f:
        f.write(pipelineXml)

    # call pdal
    system.run('{bin}/pdal pipeline -i {xml} --readers.ply.filename={f_in} '
               '--writers.las.filename={f_out}'.format(**kwargs))

def initialize(first_init=False):
    if first_init:
        clear()

    logger.info(locale.detected_os % platform.system())
    logger.info(locale.installing)

    required_packages = [pkg.rstrip('\n').lower() for pkg in open('requirements.txt').readlines()]
    installed_packages = [pkg[0].lower() for pkg in get_pip_info()]
    for package in required_packages:
        if package in installed_packages:
            continue

        if run(f'pip3 install {package}') == 0:
            logger.info(locale.installed % package)
        else:
            logger.info(locale.not_installed % package)

    logger.info(locale.crt_workspace)
    [[os.makedirs(f'SC/{i}-{k}', exist_ok=True) for k in ['Compressed', 'Decompressed', 'Sprites']] for i in ['In', 'Out']]
    [[os.makedirs(f'CSV/{i}-{k}', exist_ok=True) for k in ['Compressed', 'Decompressed']] for i in ['In', 'Out']]

    logger.info(locale.verifying)
    config.initialized = True

    try:
        import requests
        del requests
        config.version = get_tags('vorono4ka', 'xcoder')[0]['name'][1:]
    except ImportError as exception:
        logger.exception(exception)

    config.dump()

    if first_init:
        input(locale.to_continue)

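# Hedged alternative sketch (assumptions: Python 3.8+, and that installing every missing
# package in a single pip call is acceptable). It swaps the get_pip_info helper above for
# importlib.metadata; install_missing is a hypothetical name, not the project's API.
import subprocess
import sys
from importlib import metadata


def install_missing(requirements_path='requirements.txt'):
    with open(requirements_path) as f:
        required = {line.strip().lower() for line in f if line.strip()}
    installed = {dist.metadata['Name'].lower() for dist in metadata.distributions()}
    missing = sorted(required - installed)
    if missing:
        # one pip invocation for all missing requirements
        subprocess.check_call([sys.executable, '-m', 'pip', 'install', *missing])
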
def infer():
    text = flask.request.args.get('q', '')
    dis = flask.request.args.get('display', '')
    # print(text)
    # print(dis)
    result = system.run(text, dis)
    return result

def convert_to_dem(self, _file, _file_out, pdalJSON, sample_radius, gdal_res, gdal_radius):
    # Check if exists f_in
    if not io.file_exists(_file):
        log.ODM_ERROR('LAS file does not exist')
        return False

    kwargs = {
        'bin': context.pdal_path,
        'f_in': _file,
        'sample_radius': sample_radius,
        'gdal_res': gdal_res,
        'gdal_radius': gdal_radius,
        'f_out': _file_out,
        'json': pdalJSON
    }

    pipelineJSON = '{{' \
                   '  "pipeline":[' \
                   '    "input.las",' \
                   '    {{' \
                   '      "type":"filters.sample",' \
                   '      "radius":"{sample_radius}"' \
                   '    }},' \
                   '    {{' \
                   '      "type":"filters.pmf",' \
                   '      "extract":"true"' \
                   '    }},' \
                   '    {{' \
                   '      "resolution": {gdal_res},' \
                   '      "radius": {gdal_radius},' \
                   '      "output_type":"idw",' \
                   '      "filename":"outputfile.tif"' \
                   '    }}' \
                   '  ]' \
                   '}}'.format(**kwargs)

    with open(pdalJSON, 'w') as f:
        f.write(pipelineJSON)

    system.run('{bin}/pdal pipeline {json} --readers.las.filename={f_in} '
               '--writers.gdal.filename={f_out}'.format(**kwargs))

    if io.file_exists(kwargs['f_out']):
        return True
    else:
        return False

def process_kernels(self, path, parameters):
    """ Processes kernels in accordance with parameter settings """
    fullpath = path + '/' + 'kernels'
    assert exists(path)

    if exists(fullpath + '/' + 'sum'):
        unix.mv(fullpath + '/' + 'sum', fullpath + '/' + 'sum_nofix')

    # mask sources and receivers
    system.run('postprocess', 'fix_near_field',
               hosts='all',
               path=fullpath)

    system.run('solver', 'combine',
               hosts='head',
               path=fullpath,
               parameters=parameters)

def initialiseApp(settingsModule, user, password, email, pythonPath=None, cacheTable=None) :
    syncdbEvents = {
        "yes/no":"yes\n",
        "Username":"******" % user,
        "E-mail address":"%s\n" % email,
        "Password":"******" % password,
    }
    system.runExpect("django-admin.py %s --settings='%s' syncdb"
                     % (pythonPath and "--pythonpath='%s'" % (pythonPath) or "", settingsModule),
                     events=syncdbEvents, timeout=60)

    # Create cache table
    if cacheTable :
        # XXX: really we should not run this if the db file has content.
        try :
            system.run("django-admin.py %s --settings='%s' createcachetable %s"
                       % (pythonPath and "--pythonpath='%s'" % (pythonPath) or "", settingsModule, cacheTable))
        except :
            pass

def addUser(username, password):
    addSambaUser(username, password)

    # Add a directory for the user.
    userDir = os.path.join(SAMBA_USERS_PATH, username)
    system.run("""mkdir -p "%s" """ % userDir)
    system.run("""chown %s "%s" """ % (username, userDir))
    system.run("""chgrp %s "%s" """ % ("users", userDir))
    system.run("""chmod 700 "%s" """ % userDir)

def addUser(username, password) :
    addSambaUser(username, password)

    # Add a directory for the user.
    userDir = os.path.join(SAMBA_USERS_PATH, username)
    system.run("""mkdir -p "%s" """ % userDir)
    system.run("""chown %s "%s" """ % (username, userDir))
    system.run("""chgrp %s "%s" """ % ("users", userDir))
    system.run("""chmod 700 "%s" """ % userDir)

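# Sketch only, assuming system.run hands its string to a shell as the calls above suggest:
# quoting interpolated values with shlex.quote keeps unusual usernames or paths from breaking
# the command. SAMBA_USERS_PATH and system.run come from the snippets above; the helper name
# addUserDir is hypothetical.
import os
import shlex


def addUserDir(username):
    userDir = os.path.join(SAMBA_USERS_PATH, username)
    system.run("mkdir -p %s" % shlex.quote(userDir))
    system.run("chown %s %s" % (shlex.quote(username), shlex.quote(userDir)))
    system.run("chgrp users %s" % shlex.quote(userDir))
    system.run("chmod 700 %s" % shlex.quote(userDir))
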
def setup(self):
    """ Lays groundwork for inversion """
    # clean scratch directories
    if PAR.BEGIN == 1:
        unix.rm(PATH.GLOBAL)
        unix.mkdir(PATH.GLOBAL)

    preprocess.setup()
    postprocess.setup()
    optimize.setup()

    if PATH.DATA:
        print('Copying data...')
    else:
        print('Generating data...')

    system.run('solver', 'setup', hosts='all')

def deleteShare(shareName):
    debugOutput("Deleting share %s" % shareName)

    if shareName not in getShares():
        return

    group = groupNameFromShare(shareName)
    sharePath = os.path.join(SAMBA_SHARES_PATH, shareName)
    system.run("""rm -rf "%s" """ % sharePath, exitOnFail=False)
    system.run("""delgroup "%s" """ % group, exitOnFail=False)

    # Remove smb.conf entry
    sambaConfig = system.readFromFile(SAMBA_CONFIG)
    match = re.search(r"\[%s\][^[]+" % shareName, sambaConfig, re.DOTALL)
    if match:
        sambaConfig = sambaConfig.replace(match.group(0), "").strip()
    system.writeToFile(SAMBA_CONFIG, sambaConfig)

    restart()

def setup(self):
    """ Lays groundwork for inversion """
    # clean scratch directories
    if PAR.BEGIN == 1:
        unix.rm(PATH.SCRATCH)
        unix.mkdir(PATH.SCRATCH)

    preprocess.setup()
    postprocess.setup()
    optimize.setup()

    if PATH.DATA:
        print 'Copying data'
    else:
        print 'Generating data'

    system.run('solver', 'setup', hosts='all')

def deleteShare(shareName) :
    debugOutput("Deleting share %s" % shareName)

    if shareName not in getShares() :
        return

    group = groupNameFromShare(shareName)
    sharePath = os.path.join(SAMBA_SHARES_PATH, shareName)
    system.run("""rm -rf "%s" """ % sharePath, exitOnFail=False)
    system.run("""delgroup "%s" """ % group, exitOnFail=False)

    # Remove smb.conf entry
    sambaConfig = system.readFromFile(SAMBA_CONFIG)
    match = re.search(r"\[%s\][^[]+" % shareName, sambaConfig, re.DOTALL)
    if match :
        sambaConfig = sambaConfig.replace(match.group(0),"").strip()
    system.writeToFile(SAMBA_CONFIG, sambaConfig)

    restart()

def process_kernels(self, path, parameters):
    """ Processes kernels in accordance with parameter settings """
    fullpath = path +'/'+ 'kernels'
    assert exists(path)

    if exists(fullpath +'/'+ 'sum'):
        unix.mv(fullpath +'/'+ 'sum', fullpath +'/'+ 'sum_nofix')

    # mask sources and receivers
    system.run('postprocess', 'fix_near_field',
               hosts='all',
               path=fullpath)

    system.run('solver', 'combine',
               hosts='head',
               path=fullpath,
               parameters=parameters)

def run_model():
    start_state = (monkey_combo.get(), box_combo.get(), banana_combo.get(), is_on_box_combo.get())
    set_img(start_state)
    system = run(start_state)
    for idx, (state, route) in enumerate(zip(system.states, system.routes)):
        time.sleep(Rate / 1000)  # crude blocking sleep used here
        set_img(state)
        set_txt(idx, state, route)
        root.update()
    return system.states, system.routes

def setup(self):
    """ Lays groundwork for inversion """
    # clean scratch directories
    if PAR.BEGIN == 1:
        unix.rm(PATH.SCRATCH)
        unix.mkdir(PATH.SCRATCH)

    preprocess.setup()
    postprocess.setup()
    optimize.setup()

    isready = self.solver_status()
    if not isready:
        if PATH.DATA:
            print 'Copying data...'
        else:
            print 'Generating data...'

        system.run('solver', 'setup', hosts='all')

def addShare(share, group=None, description=None, public=False):
    debugOutput("Adding Samba share '%s'" % (share))

    if not group:
        group = groupNameFromShare(share)
    sharePath = os.path.join(SAMBA_SHARES_PATH, share)

    if share in getShares():
        if not smartbox.options.testRun:
            interface.abort("The share '%s' exists or has been defined already" % sharePath)

    if group not in system.readFromFile("/etc/group"):
        system.addGroup(group)

    if not os.path.exists(sharePath):
        system.run("""mkdir -p "%s" """ % sharePath, exitOnFail=False)
    system.run("""chgrp %s "%s" """ % (group, sharePath))
    system.run("""chmod 6770 "%s" """ % sharePath)

    shareConfig = _createShareConfig(share, sharePath, group, description)
    sambaConfig = system.readFromFile(SAMBA_CONFIG)
    sambaConfig += "\n\n%s" % shareConfig

    sambaConfig = setShareDetails(share, description, public=public, sambaConfig=sambaConfig)
    system.writeToConfigFile("/etc/samba/smb.conf", sambaConfig)

    restart()

def addShare(share, group=None, description=None, public=False) :
    debugOutput("Adding Samba share '%s'" % (share))

    if not group :
        group = groupNameFromShare(share)
    sharePath = os.path.join(SAMBA_SHARES_PATH, share)

    if share in getShares():
        if not smartbox.options.testRun :
            interface.abort("The share '%s' exists or has been defined already" % sharePath)

    if group not in system.readFromFile("/etc/group") :
        system.addGroup(group)

    if not os.path.exists(sharePath) :
        system.run("""mkdir -p "%s" """ % sharePath, exitOnFail = False)
    system.run("""chgrp %s "%s" """ % (group, sharePath))
    system.run("""chmod 6770 "%s" """ % sharePath)

    shareConfig = _createShareConfig(share, sharePath, group, description)
    sambaConfig = system.readFromFile(SAMBA_CONFIG)
    sambaConfig += "\n\n%s" % shareConfig

    sambaConfig = setShareDetails(share, description, public=public, sambaConfig=sambaConfig)
    system.writeToConfigFile("/etc/samba/smb.conf",sambaConfig)

    restart()

def setAdminPassword(password):
    # Set samba admin password
    samba.setPassword("root", password)

    # Set web interface admin password
    system.run("htpasswd -bc %s '%s' '%s'" % (PASSWORD_FILE, "Administrator", password))
    system.run("chgrp www-data '%s'" % PASSWORD_FILE)
    system.run("chmod 740 '%s'" % PASSWORD_FILE)

def convert_to_las(self, _file, _file_out, json_file):
    if not self.projection.srs:
        log.ODM_ERROR('Empty CRS: Could not convert to LAS')
        return

    kwargs = {
        'bin': context.pdal_path,
        'f_in': _file,
        'f_out': _file_out,
        'east': self.utm_east_offset,
        'north': self.utm_north_offset,
        'srs': self.projection.srs,
        'json': json_file
    }

    # create pipeline file las.json to write odm_georeferenced_model.laz point cloud
    pipeline = '{{' \
               '  "pipeline":[' \
               '    "untransformed.ply",' \
               '    {{' \
               '      "type":"writers.las",' \
               '      "a_srs":"{srs}",' \
               '      "offset_x":"{east}",' \
               '      "offset_y":"{north}",' \
               '      "offset_z":"0",' \
               '      "compression":"laszip",' \
               '      "filename":"{f_out}"' \
               '    }}' \
               '  ]' \
               '}}'.format(**kwargs)

    with open(json_file, 'w') as f:
        f.write(pipeline)

    # call pdal
    system.run(
        '{bin}/pdal pipeline -i {json} --readers.ply.filename={f_in}'.format(**kwargs))

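# Hedged sketch: the same PDAL pipeline can be built with json.dumps instead of a hand-escaped
# format string, which avoids the doubled braces and quoting mistakes. The stage options mirror
# the writers.las block above; write_las_pipeline is a hypothetical helper, not ODM's API.
import json


def write_las_pipeline(json_file, srs, east, north, f_out):
    pipeline = {
        "pipeline": [
            "untransformed.ply",
            {
                "type": "writers.las",
                "a_srs": srs,
                "offset_x": str(east),
                "offset_y": str(north),
                "offset_z": "0",
                "compression": "laszip",
                "filename": f_out,
            },
        ]
    }
    with open(json_file, 'w') as f:
        json.dump(pipeline, f, indent=2)
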
def convert_to_las(self, _file, _file_out, json_file):
    if not self.epsg:
        log.ODM_ERROR('Empty EPSG: Could not convert to LAS')
        return

    kwargs = {'bin': context.pdal_path,
              'f_in': _file,
              'f_out': _file_out,
              'east': self.utm_east_offset,
              'north': self.utm_north_offset,
              'epsg': self.epsg,
              'json': json_file}

    # create pipeline file transform.xml to enable transformation
    pipeline = '{{' \
               '  "pipeline":[' \
               '    "untransformed.ply",' \
               '    {{' \
               '      "type":"filters.transformation",' \
               '      "matrix":"1 0 0 {east} 0 1 0 {north} 0 0 1 0 0 0 0 1"' \
               '    }},' \
               '    {{' \
               '      "a_srs":"EPSG:{epsg}",' \
               '      "offset_x":"{east}",' \
               '      "offset_y":"{north}",' \
               '      "offset_z":"0",' \
               '      "filename":"transformed.las"' \
               '    }}' \
               '  ]' \
               '}}'.format(**kwargs)

    with open(json_file, 'w') as f:
        f.write(pipeline)

    # call pdal
    system.run('{bin}/pdal pipeline -i {json} --readers.ply.filename={f_in} '
               '--writers.las.filename={f_out}'.format(**kwargs))

def convert_to_las(self, _file, _file_out, json_file):
    if not self.epsg:
        log.ODM_ERROR('Empty EPSG: Could not convert to LAS')
        return

    kwargs = {'bin': context.pdal_path,
              'f_in': _file,
              'f_out': _file_out,
              'east': self.utm_east_offset,
              'north': self.utm_north_offset,
              'epsg': self.epsg,
              'json': json_file}

    # create pipeline file transform.xml to enable transformation
    pipeline = '{{' \
               '  "pipeline":[' \
               '    "untransformed.ply",' \
               '    {{' \
               '      "type":"filters.transformation",' \
               '      "matrix":"1 0 0 {east} 0 1 0 {north} 0 0 1 0 0 0 0 1"' \
               '    }},' \
               '    {{' \
               '      "a_srs":"EPSG:{epsg}",' \
               '      "forward":"scale",' \
               '      "filename":"transformed.las"' \
               '    }}' \
               '  ]' \
               '}}'.format(**kwargs)

    with open(json_file, 'w') as f:
        f.write(pipeline)

    # call pdal
    system.run('{bin}/pdal pipeline -i {json} --readers.ply.filename={f_in} '
               '--writers.las.filename={f_out}'.format(**kwargs))

def setup(self):
    """ Lays groundwork for inversion """
    # clean scratch directories
    if PAR.BEGIN == 1:
        unix.rm(PATH.GLOBAL)
        unix.mkdir(PATH.GLOBAL)

    # set up optimization
    optimize.setup()

    # set up pre- and post-processing
    preprocess.setup()
    postprocess.setup()

    # set up solver
    if PAR.BEGIN == 1:
        system.run('solver', 'setup', hosts='all')
        return

    if PATH.LOCAL:
        system.run('solver', 'setup', hosts='all')

def deleteUser(username):
    if username not in getUsers():
        return

    userDir = os.path.join(SAMBA_USERS_PATH, username)
    profileDir = os.path.join(SAMBA_HOME, "profiles", username)

    system.run("""smbpasswd -x "%s" """ % username)
    system.run("""rm -rf "%s" """ % userDir)
    system.run("""rm -rf "%s" """ % profileDir)

def deleteUser(username) :
    if username not in getUsers() :
        return

    userDir = os.path.join(SAMBA_USERS_PATH, username)
    profileDir = os.path.join(SAMBA_HOME, "profiles", username)

    system.run("""smbpasswd -x "%s" """ % username)
    system.run("""rm -rf "%s" """ % userDir)
    system.run("""rm -rf "%s" """ % profileDir)

def main(self):
    """ Migrates seismic data """
    # prepare directory structure
    unix.rm(PATH.GLOBAL)
    unix.mkdir(PATH.GLOBAL)

    # set up workflow machinery
    preprocess.setup()
    postprocess.setup()

    # set up solver machinery
    print 'Preparing solver...'
    system.run('solver', 'setup', hosts='all')

    self.prepare_model()

    # perform migration
    print 'Generating synthetics...'
    system.run('solver', 'eval_func',
               hosts='all',
               path=PATH.GLOBAL)

    print 'Backprojecting data...'
    system.run('solver', 'eval_grad',
               hosts='all',
               path=PATH.GLOBAL,
               export_traces=PAR.SAVETRACES)

    postprocess.combine_kernels(
        path=PATH.GLOBAL,
        parameters=solver.parameters)

    try:
        postprocess.combine_kernels(
            path=PATH.GLOBAL,
            parameters=['rhop'])
    except:
        pass

    if PAR.SAVETRACES:
        self.save_traces()
    if PAR.SAVEKERNELS:
        self.save_kernels()
    else:
        self.save_kernels_sum()

    print 'Finished\n'

def main(self):
    unix.rm(PATH.SCRATCH)
    unix.mkdir(PATH.SCRATCH)
    preprocess.setup()

    print 'SIMULATION 1 OF 3'
    system.run('solver', 'setup', hosts='all')

    print 'SIMULATION 2 OF 3'
    self.prepare_model()
    system.run('solver', 'eval_func',
               hosts='all',
               path=PATH.SCRATCH)

    print 'SIMULATION 3 OF 3'
    system.run('solver', 'eval_grad',
               hosts='all',
               path=PATH.SCRATCH)

    # collect traces
    obs = join(PATH.SOLVER, self.event, 'traces/obs')
    syn = join(PATH.SOLVER, self.event, 'traces/syn')
    adj = join(PATH.SOLVER, self.event, 'traces/adj')

    obs,_ = preprocess.load(obs)
    syn,_ = preprocess.load(syn)
    adj,_ = preprocess.load(adj, suffix='.su.adj')

    # collect model and kernels
    model = solver.load(PATH.MODEL_INIT)
    kernels = solver.load(PATH.SCRATCH+'/'+'kernels'+'/'+self.event, suffix='_kernel')

    # dot product in data space
    keys = obs.keys()
    LHS = DotProductLHS(keys, syn, adj)

    # dot product in model space
    keys = ['rho', 'vp', 'vs']  # model.keys()
    RHS = DotProductRHS(keys, model, kernels)

    print
    print 'LHS:', LHS
    print 'RHS:', RHS
    print 'RELATIVE DIFFERENCE:', (LHS-RHS)/RHS
    print

def install(backupSources=None) :
    debugOutput("Installing rsnapshot")
    apt.install("rsnapshot")

    # TODO: Sort out config file handling.
    system.run("mkdir /.rsnapshot", exitOnFail=False)
    system.run("chmod +rx /.rsnapshot")

    rsnapshotConf = system.readFromFile("/etc/rsnapshot.conf")
    rsnapshotConf = rsnapshotConf.replace("snapshot_root\t/var/cache/rsnapshot/","snapshot_root\t/.rsnapshot/")
    #rsnapshotConf = rsnapshotConf.replace("#no_create_root\t1","no_create_root\t0")
    rsnapshotConf = rsnapshotConf.replace("#interval","interval")
    rsnapshotConf = rsnapshotConf.replace("\nbackup","\n#backup")
    rsnapshotConf += "\n\n"
    for backupSource in backupSources :
        rsnapshotConf += "backup\t%s/\tbackups/\n" % backupSource.rstrip("/")
    rsnapshotConf += "backup\t/home/\t.system/\n"
    rsnapshotConf += "backup\t/etc/\t.system/\n"
    debugOutput(rsnapshotConf)
    system.writeToConfigFile("/etc/rsnapshot.conf",rsnapshotConf)

    cronConf = system.readFromFile("/etc/cron.d/rsnapshot")
    cronConf = cronConf.replace("# 0","0")
    cronConf = cronConf.replace("# 30","30")
    cronConf = cronConf.replace("*/4","*")
    debugOutput(cronConf)
    system.writeToConfigFile("/etc/cron.d/rsnapshot", cronConf)

    interface.updateProgress()
    system.run("rsnapshot hourly")
    interface.updateProgress()

def generateLogonScript() :
    import socket

    try :
        username = smartbox.options.args[-1]
    except :
        interface.abort("You must enter a username to generate a login script.")

    debugOutput("Generating logon.bat for '%s'." % username)

    hostName = socket.gethostname()
    loginScript = "echo off\nnet use /persistent:no\n"

    shareFolder = SAMBA_SHARES_PATH
    for share in os.listdir(shareFolder) :
        sharePath = os.path.join(shareFolder, share)
        debugOutput("Checking permissions on '%s' " % sharePath)
        try :
            shareGroup = grp.getgrgid(os.stat(sharePath).st_gid)
            debugOutput("shareGroup: %s members %s" % (shareGroup, shareGroup.gr_mem))
            if username in shareGroup.gr_mem or shareGroup.gr_name == "users":
                loginScript += """net use * "\\\\%s\\%s"\n""" % (hostName, share)
        except :
            pass

    # Add the backup share
    loginScript += """net use * "\\\\%s\\%s"\n""" % (hostName, "backups")

    debugOutput(loginScript)

    # Use DOS file endings.
    loginScript = loginScript.replace("\n","\r\n")

    scriptFile = os.path.join(SAMBA_HOME, "netlogon", "%s.bat" % username)
    system.writeToFile(scriptFile, loginScript)
    system.run("""chown "%s" "%s" """ % (username, scriptFile))
    system.run("""chgrp "%s" "%s" """ % ("users", scriptFile))
    system.run("""chmod 700 "%s" """ % scriptFile)

def process_kernels(self, path, parameters):
    if PAR.SMOOTH > 0.:
        system.run('solver', 'smooth',
                   hosts='head',
                   precond=PAR.MASK,
                   span=PAR.SMOOTH)

def setSiteEnabled(site, enabled) :
    if enabled :
        system.run("a2ensite %s" % site, exitOnFail = False)
    else :
        system.run("a2dissite %s" % site, exitOnFail = False)
    restart()

def startServer(settingsModule, host, port, pythonPath=None, block=False) :
    command = "django-admin.py %s --settings='%s' runserver %s:%s" % (
        pythonPath and "--pythonpath='%s'" % (pythonPath) or "",
        settingsModule, host, port)
    if not block :
        command += " 1>/dev/null 2>/dev/null &"
    system.run(command)

def stopServer(host, port) :
    system.run("""pkill -f 'runserver %s:%s'""" % (host, port), exitOnFail=False)