def __init__(self, name, start_calico=True, as_num=None):
    """
    Create a container using an image made for docker-in-docker.
    Load saved images into it.
    """
    self.name = name
    self.as_num = as_num
    pwd = sh.pwd().stdout.rstrip()
    docker.run("--privileged", "-v", pwd + ":/code",
               "--name", self.name, "-tid", "jpetazzo/dind")

    # Since `calicoctl node` doesn't fix ipv6 forwarding and module
    # loading, we must fix it manually.
    self.calicoctl("checksystem --fix")

    self.ip = docker.inspect("--format", "{{ .NetworkSettings.IPAddress }}",
                             self.name).stdout.rstrip()
    self.ip6 = docker.inspect("--format",
                              "{{ .NetworkSettings.GlobalIPv6Address }}",
                              self.name).stdout.rstrip()

    # Make sure docker is up before loading images.
    docker_ps = partial(self.execute, "docker ps")
    retry_until_success(docker_ps, ex_class=ErrorReturnCode)

    self.execute("docker load --input /code/calico_containers/calico-node.tar")
    self.execute("docker load --input /code/calico_containers/busybox.tar")

    if start_calico:
        self.start_calico_node()
        self.assert_powerstrip_up()
class ProvisionWorkflowForm(Form):

    filename = "abc"
    dir = path_expand(
        cm_config_server().get("cloudmesh.server.workflows.path"))
    # not so nice kludge; ask for the location of static instead
    web_pwd = pwd().strip()
    basename = "/static/{0}/{1}".format(dir, filename)
    print "BBBB", basename
    try:
        with open("{2}/{0}.{1}".format(basename, "diag", web_pwd), "r") as f:
            data = f.readlines()[1:-1]
        default = "".join(data)
    except IOError:
        print "Error: diagram not found"
        default = ""
    default = default.split("//graph")

    filename = TextField("Filename", default=filename)
    # properties = TextAreaField("Workflow", default=default[0])
    # workflow = TextAreaField("Workflow", default=default[1])
    properties = TextAreaField("Workflow", default="fake")
    workflow = TextAreaField("Workflow", default="fake")
def git_clone_to_local(dest_directory, webhook_data):
    git = sh.git.bake()
    logger.debug('Making destination directory %s' % dest_directory)
    print('Making destination directory %s' % dest_directory)
    sh.mkdir('-p', dest_directory)
    sh.cd(dest_directory)
    logger.debug("checking for repo_name %s in %s" %
                 (webhook_data.repo_name, sh.pwd()))
    if not os.path.exists(webhook_data.repo_name):
        logger.debug("Cloning new repository")
        print(git.clone(webhook_data.repo_url, webhook_data.repo_name))
    sh.cd(webhook_data.repo_name)
    print(git.fetch('--all'))
    try:
        git('show-ref', '--heads', webhook_data.branch_name)
        branch_exists = True
    except sh.ErrorReturnCode:
        branch_exists = False
    if branch_exists is False and not webhook_data.is_tag():
        print(git.checkout('-b', webhook_data.branch_name,
                           'origin/%s' % webhook_data.branch_name))
    elif branch_exists:
        git.checkout(webhook_data.branch_name)
        print(git.pull())
    print webhook_data.before, webhook_data.after
def guessDefaultDiagramLocation(self):
    arr_dir_current = pwd().strip().split("/")
    # current py file:          cloudmesh_home/cloudmesh/rack/*.py
    # cloudmesh_web static dir: cloudmesh_home/cloudmesh_web/static
    arr_dir_guess = arr_dir_current[0:-2] + ["cloudmesh_web", "static", "racks"]
    return "/".join(arr_dir_guess)
def transfer(event_path_this_rank):
    root_path = str(sh.pwd())[:-1]
    for thedir in event_path_this_rank:
        sh.cd(thedir)
        stdin_list = []
        for sacfile in glob.glob("*.SAC"):
            net, sta, loc, chn = sacfile.split('.')[0:4]
            pz = glob.glob(f"SAC_PZs_{net}_{sta}_{chn}_{loc}_*_*")
            if len(pz) != 1:
                logger.error(
                    f"[rank:{rank},dir:{thedir}] error in transferring {sacfile}: "
                    f"expected exactly one pole-zero file, found {pz}")
                continue
            # logger.info(
            #     f"[rank:{rank},dir:{thedir}] transfer {sacfile} with {pz}")
            stdin_list.append(f"r {sacfile}\n")
            stdin_list.append("rmean; rtr; taper\n")
            stdin_list.append(
                f"trans from pol s {pz[0]} to none freq 0.001 0.005 5 10\n")
            stdin_list.append("mul 1.0e9\n")
            stdin_list.append("w over\n")
        # quit SAC once, after all files in this directory have been queued
        stdin_list.append("q\n")
        sh.sac(_in=stdin_list)
        sh.cd(root_path)
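# For reference, the command stream that `transfer` feeds to SAC through
# `_in=` looks like this for a hypothetical file IC.BJT.00.BHZ.SAC (the
# pole-zero filename below is illustrative, not from the source):
#
#   r IC.BJT.00.BHZ.SAC
#   rmean; rtr; taper
#   trans from pol s SAC_PZs_IC_BJT_BHZ_00_2018_2020 to none freq 0.001 0.005 5 10
#   mul 1.0e9
#   w over
#   q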
def do_smoothing(kernel_process_directory, sigma_h, sigma_v, input_dir,
                 output_dir, n_tasks):
    """
    do_smoothing: perform smoothing for the summed kernel.
    (use the workflow order in our lab)
    """
    # The commented-out variant below uses the smoother shipped with
    # specfem, which can be very slow:
    #
    # result = ""
    # to_smooth_kernel_names = [
    #     "bulk_c_kernel", "bulk_betav_kernel", "bulk_betah_kernel", "eta_kernel"]
    # current_path = str(sh.pwd())[:-1]  # pylint: disable=not-callable
    # result += f"cd {kernel_process_directory};"
    # for each_name in to_smooth_kernel_names:
    #     result += f"ibrun ./bin/xsmooth_sem {sigma_h} {sigma_v} {each_name} {input_dir} {output_dir};"
    # result += f"cd {current_path};\n"

    # Use the smoother from sem_utils instead.
    result = ""
    # load the required netcdf module
    result += "module load netcdf; \n"
    to_smooth_kernel_names = [
        "bulk_c_kernel", "bulk_betav_kernel", "bulk_betah_kernel", "eta_kernel"
    ]
    to_smooth_kernel_names = ",".join(to_smooth_kernel_names)
    current_path = str(sh.pwd())[:-1]  # pylint: disable=not-callable
    result += f"cd {kernel_process_directory};"
    result += (f"ibrun ./sem_utils_bin/xsem_smooth {n_tasks} "
               f"{join(kernel_process_directory, 'topo')} {input_dir} "
               f"{to_smooth_kernel_names} {sigma_h} {sigma_v} {output_dir} _smooth;")
    result += f"cd {current_path};\n"
    return result
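# A minimal usage sketch for do_smoothing; every path and parameter value
# here is hypothetical, chosen only to show the shape of the generated
# shell fragment:
#
#   script = do_smoothing("/scratch/kernels", sigma_h=50, sigma_v=10,
#                         input_dir="INPUT_KERNELS",
#                         output_dir="OUTPUT_SMOOTH", n_tasks=336)
#   # script is a string such as:
#   #   module load netcdf;
#   #   cd /scratch/kernels;ibrun ./sem_utils_bin/xsem_smooth 336 ... _smooth;cd <cwd>;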
def gitrepo(root):
    tmpdir = sh.pwd().strip()
    sh.cd(root)
    gitlog = sh.git('--no-pager', 'log', '-1',
                    pretty="format:%s" % FORMAT).split('\n', 5)
    branch = os.environ.get('CIRCLE_BRANCH') or os.environ.get(
        'TRAVIS_BRANCH', sh.git('rev-parse', '--abbrev-ref', 'HEAD').strip())
    remotes = [
        x.split() for x in filter(lambda x: x.endswith('(fetch)'),
                                  sh.git.remote('-v').strip().splitlines())
    ]
    sh.cd(tmpdir)
    return {
        "head": {
            "id": gitlog[0],
            "author_name": gitlog[1],
            "author_email": gitlog[2],
            "committer_name": gitlog[3],
            "committer_email": gitlog[4],
            "message": gitlog[5].strip(),
        },
        "branch": branch,
        "remotes": [{'name': remote[0], 'url': remote[1]}
                    for remote in remotes],
    }
def gitrepo(root):
    '''Construct a dictionary holding all the Git data that can be found.'''
    oldpwd = sh.pwd().strip()
    sh.cd(root)
    gitlog = sh.git('--no-pager', 'log', '-1',
                    pretty="format:%s" % FORMAT).split('\n', 7)
    branch = (os.environ.get('CIRCLE_BRANCH') or
              os.environ.get('TRAVIS_BRANCH',
                             sh.git('rev-parse', '--abbrev-ref',
                                    'HEAD').strip()))
    remotes = [x.split() for x in sh.git.remote('-v').strip().splitlines()
               if x.endswith('(fetch)')]
    sh.cd(oldpwd)
    return {
        "head": {
            "id": gitlog[0],
            "author_name": gitlog[1],
            "author_email": gitlog[2],
            "author_timestamp": gitlog[3],
            "committer_name": gitlog[4],
            "committer_email": gitlog[5],
            "committer_timestamp": gitlog[6],
            "message": gitlog[7].strip(),
        },
        "branch": branch,
        "remotes": [{'name': r[0], 'url': r[1]} for r in remotes],
    }
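# FORMAT is defined elsewhere in the module; a git pretty-format string
# consistent with the eight newline-separated fields unpacked above would
# be the following (an assumption, not the source's literal value):
#
#   FORMAT = '%H%n%an%n%ae%n%at%n%cn%n%ce%n%ct%n%s'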
def itern_generate_perturbed_kernel(kernel_process_directory, perturbed_value,
                                    pyexec):
    """
    itern_generate_perturbed_kernel: generate the perturbed model for the
    following steps.
    """
    result = ""
    current_path = str(sh.pwd())[:-1]  # pylint: disable=not-callable
    result += f"cd {kernel_process_directory};"
    # When we run add_model_globe_tiso, INPUT_MODEL (from the previous gll
    # directory), INPUT_GRADIENT (from ln smooth) and topo (from the
    # database) have all been established.
    result += f"ibrun ./bin/xadd_model_tiso_cg {perturbed_value};"
    # move the kernel files in OUTPUT_MODEL to
    # perturbed_{perturbed_value}_for_line_search
    result += f"mkdir -p perturbed_{perturbed_value}_for_line_search;"
    result += f"mkdir -p gll_for_perturbed_{perturbed_value}_for_line_search;"
    result += f"mv OUTPUT_MODEL/* perturbed_{perturbed_value}_for_line_search/;"
    # ! here we have to make the gll directory
    # fix a bug here, as ln_new_model_to_gll will call seisflow
    result += f"cd {current_path};\n"
    result += ln_new_model_to_gll(
        pyexec,
        join(kernel_process_directory,
             f"perturbed_{perturbed_value}_for_line_search"),
        join(kernel_process_directory,
             f"gll_for_perturbed_{perturbed_value}_for_line_search"))
    result += f"cd {current_path};\n"
    return result
def __init__(self, server: dict, intersectionList: list, logger: Logger):
    self.logger = logger
    self.serverDataDirectory = server["data_directory"]
    self.intersectionList = intersectionList
    self.dataElements = ["spat", "srm", "remoteBsm", "ssm", "msgCount"]
    self.dataElementFiles = {"spat": None, "srm": None, "remoteBsm": None,
                             "ssm": None, "msgCount": None}
    self.workingDirectory = str(sh.pwd())[:-1]
def rdseed(event_path_this_rank):
    root_path = str(sh.pwd())[:-1]
    for thedir in event_path_this_rank:
        sh.cd(thedir)
        for seed in glob.glob("*SEED"):
            logger.info(f"[rank:{rank},dir:{thedir}] rdseed {seed}")
            sh.rdseed('-pdf', seed)
        sh.cd(root_path)
def get_working_directory(self):
    """
    Use the ``sh`` library to return the current working directory
    using the unix command ``pwd``.

    :returns: str
    """
    return '{0}'.format(pwd()).strip()
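# sh.pwd() returns the command's stdout, trailing newline included, which is
# why the snippets in this file call .strip()/.rstrip() or slice off the
# last character. A quick illustration (the path is hypothetical):
#
#   >>> str(pwd())
#   '/home/user/project\n'
#   >>> '{0}'.format(pwd()).strip()
#   '/home/user/project'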
def rm_single_event(event_directory):
    """
    rm the extracted directory
    """
    current_path = str(sh.pwd())[:-1]  # pylint: disable=not-callable
    sh.cd(event_directory)
    sh.rm("-rf", "SAC")
    sh.cd(current_path)
def unzip_single_event(event_directory):
    """
    unzip the sac files in a directory
    """
    current_path = str(sh.pwd())[:-1]  # pylint: disable=not-callable
    sh.cd(event_directory)
    sh.tar("xvf", "SAC.tar.gz")
    sh.cd(current_path)
def absolute_path(path):
    """
    Returns the absolute path given a relative (or absolute) path.
    Also handles the use of '~' for the home directory.
    """
    path = re.sub('~', os.environ['HOME'], str(path))
    if path[0] != '/':
        path = str(sh.pwd()).strip() + '/' + path
    return path
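# Usage sketch for absolute_path (all paths illustrative):
#
#   absolute_path('~/notes.txt')    # -> '/home/user/notes.txt'
#   absolute_path('data/run.log')   # -> '<cwd>/data/run.log'
#   absolute_path('/etc/hosts')     # -> '/etc/hosts'
#
# os.path.abspath(os.path.expanduser(path)) achieves much the same thing
# without shelling out, and only expands a *leading* '~'.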
from contextlib import contextmanager  # needed for the yield-based helper


@contextmanager
def cwd(path):
    """Temporarily change the working directory, restoring it on exit."""
    previous_pwd = pwd().rstrip()
    try:
        cd(path)
    except OSError:
        raise InvalidDirectoryPath(
            'WARNING: did not change working directory to %s' % str(path))
    yield
    cd(previous_pwd)
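# Usage sketch for the cwd context manager; the target directory is
# hypothetical:
#
#   with cwd('/tmp'):
#       ...  # runs with /tmp as the working directory
#   # the previous working directory is restored on exit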
def copy_files(self):
    """
    Copy the LICENSE and CONTRIBUTING files to each folder repo
    """
    files = ['LICENSE.md', 'CONTRIBUTING.md']
    this_dir = sh.pwd().strip()
    for _file in files:
        sh.cp(
            '{0}/templates/{1}'.format(this_dir, _file),
            '{0}/'.format(self.book.textdir)
        )
def deploy(name, fn=None, bucket='lambda_methods'):
    print 'Preparing lambda method:', name
    orig_dir = sh.pwd().strip()
    dirname = '{}/{}'.format(orig_dir, name)
    zip_name = '{}/{}.zip'.format(dirname, name)
    if os.path.exists(dirname):
        sh.rm('-rf', dirname)
    # cp skeleton project data
    sh.cp('-r', os.path.join(os.path.dirname(__file__), 'project'), dirname)
    base_zip = '{}/dist.zip'.format(dirname)
    if not os.path.exists(base_zip):
        _docker('--rm', '-v', '{}:/app'.format(dirname),
                'quay.io/pypa/manylinux1_x86_64', '/app/scripts/build.sh')
        sh.zip('-9', zip_name, '-j', '{}/README.md'.format(dirname))
        sh.cd(os.path.join(dirname, 'build'))
        sh.zip('-r9', zip_name, sh.glob('*'))
        sh.cd(dirname)
    else:
        sh.mv(base_zip, zip_name)
    if fn is not None:
        with open(os.path.join(dirname, 'src', 'custom.py'), 'w') as fh:
            fh.write(fn)
    sh.cp(os.path.join(dirname, 'src', 'template.py'),
          os.path.join(dirname, 'src', '{}.py'.format(name)))
    sh.cd(os.path.join(dirname, 'src'))
    sh.zip('-r9', zip_name, sh.glob('*'))
    sh.cd(orig_dir)

    def percent_cb(complete, total):
        sys.stdout.write('.')
        sys.stdout.flush()

    print 'Publishing zip file to S3', 's3://{}/{}.zip'.format(bucket, name)
    b = _s3conn.get_bucket(bucket)
    k = Key(b)
    k.key = '{}.zip'.format(name)
    k.set_contents_from_filename(zip_name, cb=percent_cb, num_cb=10)
    try:
        _lambda.delete_function(FunctionName=name)
    except Exception:
        pass  # the function may not exist yet; ignore
    b = _s3conn.get_bucket('idaho-lambda')
    for key in b.list(prefix=name):
        key.delete()
    print 'Creating function'
    code = {'S3Bucket': bucket, 'S3Key': '{}.zip'.format(name)}
    handler = '{}.handler'.format(name)
    role = 'arn:aws:iam::523345300643:role/lambda_s3_exec_role'
    _lambda.create_function(FunctionName=name, Code=code, Role=role,
                            Handler=handler, Runtime='python2.7',
                            Timeout=60, MemorySize=1024)
def display_provision_workflow_form():
    form = ProvisionWorkflowForm(csrf=False)
    dir = path_expand(
        cm_config_server().get("cloudmesh.server.workflows.path"))
    filename = "abc"
    web_pwd = pwd().strip()
    print "PWD", web_pwd
    basename = "/static/{0}/{1}".format(dir, filename)
    try:
        with open("{2}/{0}.{1}".format(basename, "diag", web_pwd), "w") as f:
            f.write("blockdiag {\n")
            # fall back to the field defaults when the form is empty
            if form.workflow.data == "":
                form.workflow.data = form.workflow.default
            if form.properties.data == "":
                form.properties.data = form.properties.default
            f.write(form.properties.data)
            f.write("//graph\n")
            f.write(form.workflow.data)
            f.write("\n}")
        print form.workflow
    except IOError:
        print "Error: the file does not exist"
        print "{0}.{1}".format(basename, diagram_format)
    print "OOOO", basename
    blockdiag("--ignore-pil", "-Tsvg",
              "-o", "{2}/{0}.{1}".format(basename, diagram_format, web_pwd),
              "{2}/{0}.{1}".format(basename, "diag", web_pwd))
    inventory.refresh()
    return render_template("provision_workflow.html",
                           workflow=form.workflow.data,
                           form=form,
                           pwd=pwd,
                           diagram="{0}.{1}".format(basename, diagram_format),
                           inventory=inventory)
def stop(name):
    """Stop a specific docker container server"""
    pwd = str(sh.pwd()).strip()  # capture the cwd rather than echoing it
    path = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
    path = os.path.join(path, "server")
    sh.cd(path)
    sh.docker_compose("rm", "--force", "--stop", name, _out=sys.stdout)
    sh.cd(pwd)
def gitDownload(url, destination):
    print('Now git cloning from URL %s to %s' % (url, destination))
    print(sh.pwd())
    for line in sh.git.clone(url, '--progress', '--recursive',
                             _err_to_out=True, _iter=True):
        print(line)
    return
def do_preconditioned_summation(kernel_process_directory):
    """
    do_preconditioned_summation: get the script to do the preconditioned
    summation.
    """
    result = ""
    current_path = str(sh.pwd())[:-1]  # pylint: disable=not-callable
    result += f"cd {kernel_process_directory};"
    result += "ibrun ./bin/xsum_preconditioned_kernels;"
    result += f"cd {current_path};\n"
    return result
def rename(event_path_this_rank):
    root_path = str(sh.pwd())[:-1]
    for thedir in event_path_this_rank:
        sh.cd(thedir)
        for fname in glob.glob("*.SAC"):
            net, sta, loc, chn = fname.split('.')[6:10]
            # logger.info(
            #     f"[rank:{rank},dir:{thedir}] rename {fname} to {net}.{sta}.{loc}.{chn}.SAC")
            sh.mv(fname, f"{net}.{sta}.{loc}.{chn}.SAC")
        sh.cd(root_path)
def copy_files(self):
    """
    Copy the LICENSE and CONTRIBUTING files to each folder repo
    """
    # TODO: Add .gitattributes for line endings (and .gitignore?)
    FILES = ['LICENSE.md', 'CONTRIBUTING.md']
    this_dir = sh.pwd().strip()
    for _file in FILES:
        sh.cp(
            '{0}/templates/{1}'.format(this_dir, _file),
            '{0}/'.format(self.directory)
        )
def start(name):
    """Start a specific docker container server"""
    pwd = str(sh.pwd()).strip()  # capture the cwd rather than echoing it
    path = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
    path = os.path.join(path, "server")
    sh.cd(path)
    sh.docker_compose("up", "-d", name, _out=sys.stdout)
    sh.docker_compose("up", "waiter", _out=sys.stdout)
    sh.cd(pwd)
def update_model_from_line_search(kernel_process_directory, nproc):
    """
    update_model_from_line_search: update the model using the step length
    found by the line search.
    """
    result = ""
    current_path = str(sh.pwd())[:-1]  # pylint: disable=not-callable
    result += f"cd {kernel_process_directory};"
    # read the step length stored in join(current_path, 'STEP_LENGTH')
    result += (f"ibrun -n {nproc} ./bin/xadd_model_tiso_cg "
               f"`cat {join(current_path, 'STEP_LENGTH')}`;")
    result += f"cd {current_path};\n"
    return result
def run(self):
    while True:
        time.sleep(self.sleep_interval)
        print "Waking"
        if self.queue.length == 0:
            continue
        task = self.queue.dequeue()
        data = task.data
        url = data['repository']['url']
        # TODO -- don't clone this url.  But fork and clone our url.
        name = data['repository']['name']
        owner = data['repository']['owner']['name']
        self.working_dir = tempfile.mkdtemp(
            prefix=owner + '-' + name,
            dir=self.scratch_dir,
        )
        print "** Cloning to", self.working_dir
        print sh.git.clone(url, self.working_dir)
        print "** Processing files."
        for root, dirs, files in os.walk(self.working_dir):
            if '.git' in root:
                continue
            for filename in files:
                if filename.endswith(".py"):
                    infile = root + "/" + filename
                    print "** Tidying", infile
                    tmpfile = infile + ".bak"
                    script = os.path.expanduser(
                        "~/devel/PythonTidy/PythonTidy.py"
                    )
                    sh.python(script, infile, tmpfile)
                    shutil.move(tmpfile, infile)
        with directory(self.working_dir):
            print sh.pwd()
            print sh.git.status()
def copy_files(self):
    """
    Copy the LICENSE and CONTRIBUTING files to each folder repo
    """
    # TODO: Add .gitattributes for line endings (and .gitignore?)
    # license = resource_filename(__name__, 'templates/LICENSE')
    contributing = resource_filename(__name__, 'templates/CONTRIBUTING.md')
    FILES = [contributing]
    this_dir = sh.pwd().strip()
    for _file in FILES:
        sh.cp(
            _file,
            '{0}/'.format(self.directory)
        )
def ln_smoothed_kernel_to_input_dir(pyexec, smooth_dir,
                                    kernel_process_directory):
    """
    ln_smoothed_kernel_to_input_dir: remove the flag smooth and link the
    smoothed kernels to the directory INPUT_GRADIENT.
    """
    result = ""
    current_path = str(sh.pwd())[:-1]  # pylint: disable=not-callable
    result += f"cd {current_path};"
    result += f"{pyexec} -m seisflow.scripts.structure_inversion.ln_smoothed_kernel_to_input_dir --smooth_dir {smooth_dir} --kernel_process_directory {kernel_process_directory};"
    result += f"cd {current_path};\n"
    return result
def doReindex():
    options = setup()
    ecoList = options.dest.split(',')
    for eco in ecoList:
        destEco = eco[0]
        destOS = eco[1:]
        for os in destOS:
            # Recreate the index for the repository
            if os == 'r':
                print 'Recreating index for Redhat: %s' % REINDEX_CMD_LIST['r']
                sh.cd('%s/redhat' % ECO_DIR_DICT[destEco])
                sh.createrepo('.', _out=logfile)
            elif os == 'u':
                print 'Recreating index for Ubuntu: %s' % REINDEX_CMD_LIST['u']
                sh.cd('%s/ubuntu' % ECO_DIR_DICT[destEco])
                sh.pwd(_out=logfile)
                run = sh.Command('./update-archive.sh')
                run(_out=logfile, _tty_out=logfile)
            else:
                print 'This is an invalid OS'
def iter1_generate_perturbed_kernel(kernel_process_directory, perturbed_value):
    """
    iter1_generate_perturbed_kernel: generate the perturbed kernel used for
    line search. iter1 will use only the steepest descent method.
    """
    result = ""
    current_path = str(sh.pwd())[:-1]  # pylint: disable=not-callable
    result += f"cd {kernel_process_directory};"
    # When we run add_model_globe_tiso, INPUT_MODEL (from the previous gll
    # directory), INPUT_GRADIENT (from ln smooth) and topo (from the
    # database) have all been established.
    result += f"ibrun ./bin/xadd_model_tiso {perturbed_value};"
    # move the kernel files in OUTPUT_MODEL to
    # perturbed_{perturbed_value}_for_line_search
    result += f"mkdir -p perturbed_{perturbed_value}_for_line_search;"
    result += f"mv OUTPUT_MODEL/* perturbed_{perturbed_value}_for_line_search/;"
    result += f"cd {current_path};\n"
    return result
def update_model_given_step_length(kernel_process_directory, perturbed_value):
    """
    update_model_given_step_length: update the model by the optimized step
    length.
    """
    result = ""
    current_path = str(sh.pwd())[:-1]  # pylint: disable=not-callable
    result += f"cd {kernel_process_directory};"
    # When we run add_model_globe_tiso, INPUT_MODEL (from the previous gll
    # directory), INPUT_GRADIENT (from ln smooth) and topo (from the
    # database) have all been established.
    result += f"ibrun ./bin/xadd_model_tiso {perturbed_value};"
    # move the kernel files in OUTPUT_MODEL to
    # perturbed_{perturbed_value}_for_next_iteration
    result += f"mkdir -p perturbed_{perturbed_value}_for_next_iteration;"
    result += f"mv OUTPUT_MODEL/* perturbed_{perturbed_value}_for_next_iteration/;"
    result += f"cd {current_path};\n"
    return result
@contextmanager
def repository(namespace, name, branch='master'):
    '''Returns a repository'''
    with TemporaryDirectory() as download_path:
        old_directory = str(pwd()).strip()
        try:
            git.clone('https://github.com/{0}/{1}.git'.format(namespace, name),
                      download_path)
            cd(download_path)
            git.fetch('origin', branch)
            git.checkout(branch)
            yield (download_path,
                   git('rev-parse', 'HEAD'),
                   redis.Dict(key="{0}.{1}".format(namespace, name)))
        except ErrorReturnCode_128:
            mkdir(download_path)
            yield (None, None, None)
        cd(old_directory)
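# Usage sketch for the repository context manager (namespace and name are
# illustrative):
#
#   with repository('octocat', 'Hello-World') as (path, sha, store):
#       if path is not None:
#           ...  # inspect the checkout at `path`, pinned at commit `sha`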
def run_nosetest(test=None):
    if test is None:
        filename = "/tmp/nosetests_all.json"
        testname = ""
    else:
        filename = "/tmp/nosetests_{0}.json".format(test)
        testname = "../tests/test_{0}.py".format(test)
    print "PWD", pwd()
    result = None  # keep result defined even if nosetests fails
    try:
        result = nosetests("--with-json", "-w", "../tests",
                           "--json-file='{0}' {1}".format(filename, testname))
    except:
        pass
    print "RRRR", result
    return redirect("/test/nose")
def pull(url, path, remote=None):
    here = str(pwd()).strip()
    try:
        cd(path)
    except OSError:
        print "path does not exist? {}".format(path)
        cd(here)
        return
    try:
        git.status()
    except ErrorReturnCode_128:
        print "{} is not a git repository!".format(path)
        cd(here)
        return
    git.pull(remote or url, 'master')
    git.checkout('-f')
    cd(here)
def setUp(self):
    super(ArchiveRepositoryAuthenticatedMethodsTest, self).setUp()
    # Clone test repository locally.
    repo_origin = 'git@bitbucket.org:%s/%s.git' % (self.bb.username,
                                                   self.bb.repo_slug)
    # TODO : Put the temp folder on the right place for windows.
    repo_folder = os.path.join(
        '/tmp',
        'bitbucket-' + ''.join(random.choice(string.digits + string.letters)
                               for x in range(10)))
    sh.mkdir(repo_folder)
    sh.cd(repo_folder)
    self.pwd = sh.pwd().strip()
    sh.git.init()
    sh.git.remote('add', 'origin', repo_origin)
    # Add commit with empty file.
    sh.touch('file')
    sh.git.add('.')
    sh.git.commit('-m', '"Add empty file."')
    sh.git.push('origin', 'master')
def main(base_directory, simulation_type):
    flag = None
    if simulation_type == "forward":
        flag = "-f"
    elif simulation_type == "source":
        flag = "-a"
    elif simulation_type == "structure":
        flag = "-b"
    elif simulation_type == "forward_save":
        flag = "-F"
    else:
        raise Exception("no such simulation type")
    all_simulation_directories = sorted(glob(join(base_directory, "*")))
    current_path = str(sh.pwd())[:-1]  # pylint: disable=not-callable
    for each_simulation_directory in all_simulation_directories:
        sh.cd(each_simulation_directory)
        sh.perl("change_simulation_type.pl", flag)
        sh.cd(current_path)
def setUp(self):
    """ Clone the test repo locally, then add and push a commit."""
    super(ArchiveRepositoryAuthenticatedMethodsTest, self).setUp()
    # Clone test repository locally.
    repo_origin = 'git@bitbucket.org:%s/%s.git' % (self.bb.username,
                                                   self.bb.repo_slug)
    # TODO : Put the temp folder on the right place for windows.
    repo_folder = os.path.join(
        '/tmp',
        'bitbucket-' + ''.join(random.choice(string.digits + string.letters)
                               for x in range(10)))
    sh.mkdir(repo_folder)
    sh.cd(repo_folder)
    self.pwd = sh.pwd().strip()
    sh.git.init()
    sh.git.remote('add', 'origin', repo_origin)
    # Add commit with empty file.
    sh.touch('file')
    sh.git.add('.')
    sh.git.commit('-m', '"Add empty file."')
    sh.git.push('origin', 'master')
def process_layer(self, typename):
    fname_in = '%s.shp' % typename
    fname_out = '%s_rasterized.tiff' % typename
    pwd = str(sh.pwd()).strip()
    try:
        sh.cd('/tmp/layer')
        sh.rm("-rf", sh.glob('*'))
        sh.unzip('../layer.zip')
        saga_cmd.shapes_points("Points Filter", POINTS=fname_in,
                               FIELD="MASA_HUMEDO", FILTER="tmp1.shp",
                               RADIUS=100, MINNUM=25, MAXNUM=200,
                               METHOD=4, PERCENT=15)
        saga_cmd.shapes_points("Points Filter", POINTS="tmp1.shp",
                               FIELD="MASA_HUMEDO", FILTER="tmp2.shp",
                               RADIUS=100, MINNUM=25, MAXNUM=200,
                               METHOD=5, PERCENT=90)
        saga_cmd.grid_gridding("Shapes to Grid", INPUT="tmp2.shp",
                               FIELD="MASA_HUMEDO", MULTIPLE=4, LINE_TYPE=0,
                               GRID_TYPE=3, USER_SIZE=0.0001, TARGET=0,
                               USER_GRID="tmp3.sgrd")
        saga_cmd.grid_tools("Close Gaps", INPUT="tmp3.sgrd",
                            RESULT="tmp4.sgrd")
        saga_cmd.shapes_points("Convex Hull", SHAPES="tmp2.shp",
                               HULLS="tmphull.shp", POLYPOINTS=0)
        saga_cmd.shapes_grid("Clip Grid with Polygon", INPUT="tmp4.sgrd",
                             OUTPUT="tmp5.sgrd", POLYGONS="tmphull.shp")
        saga_cmd.grid_filter("Gaussian Filter", INPUT="tmp5.sgrd",
                             RESULT="tmp6", SIGMA=3, MODE=1, RADIUS=50)
        sh.gdal_translate("-of", "gtiff", "tmp6.sdat", fname_out)
    finally:
        sh.cd(pwd)
    return '/tmp/layer/%s' % fname_out
def update_model_from_line_search(kernel_process_directory, nproc):
    """
    update_model_from_line_search: update the model using the step length
    found by the line search.
    """
    pyexec = sys.executable
    result = ""
    current_path = str(sh.pwd())[:-1]  # pylint: disable=not-callable
    result += f"cd {kernel_process_directory};"
    # read the step length stored in join(current_path, 'STEP_LENGTH')
    result += (f"ibrun -n {nproc} ./bin/xadd_model_azi_cg "
               f"`cat {join(current_path, 'STEP_LENGTH')}`;\n")
    # create gll_for_OUTPUT_MODEL and copy mu0
    result += "mkdir -p gll_for_OUTPUT_MODEL;"
    result += f"cd {current_path};\n"
    result += ln_new_model_to_gll(
        pyexec,
        join(kernel_process_directory, "OUTPUT_MODEL"),
        join(kernel_process_directory, "gll_for_OUTPUT_MODEL"))
    result += f"cd {kernel_process_directory};"
    result += "cp INPUT_MODEL/*mu0* gll_for_OUTPUT_MODEL;\n"
    result += f"cd {current_path};\n"
    return result
YANDEX_SEARCH_ID = "" SOURCE = "/home/graf/projects/web/ubuntu_bible/content/ubuntu_faq.md" logging.root.setLevel(logging.DEBUG) LINKS_ON_PAGE = 20 PAGE_PATTERN = "From%dto%d" VERSION = 0.2 if (len(sys.argv)>1): BASE_URL = sys.argv[1] if BASE_URL[-1]!="/": BASE_URL+="/" if len(sys.argv)>2: YANDEX_SEARCH_ID=sys.argv[2] else: BASE_URL = "file://"+str(pwd()).replace("\n","")+"/_site/" #Code def materialize_template(template_name,fname,env): os.chdir("_templates") env['base_url']=BASE_URL env['website_title']=WEBSITE_TITLE env['default_tags']=DEFAULT_TAGS env['current_url']=BASE_URL+fname+".html" env['version']=VERSION env['yandex_search_id']=YANDEX_SEARCH_ID mylookup = TemplateLookup(directories=['.'],input_encoding='utf-8', output_encoding='utf-8', encoding_errors='replace') result = Template(filename=template_name+".mako", lookup=mylookup,
def gitDownload(url, destination):
    print('Now git cloning from URL %s to %s' % (url, destination))
    print(sh.pwd())
    for line in sh.git.clone(url, '--progress', '--recursive',
                             _err_to_out=True, _iter=True, _out_bufsize=100):
        print(line)
    return
@contextmanager
def working_directory(directory):
    _cwd = sh.pwd()
    sh.cd(directory)
    yield
    sh.cd(_cwd)
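# A slightly hardened variant (a sketch, not from the source): wrapping the
# yield in try/finally guarantees the original directory is restored even
# if the with-block raises.
#
#   @contextmanager
#   def working_directory(directory):
#       _cwd = str(sh.pwd()).strip()
#       sh.cd(directory)
#       try:
#           yield
#       finally:
#           sh.cd(_cwd)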
def __init__(self, path):
    self._og_directory = str(sh.pwd()).strip('\n')
    self._dest_directory = path
    # pattern = ' -i {0} -map 0:a -map_metadata -1 {1} -q:a {2} -v 0 -y '
    pattern = ' -i {0} -map 0:a -map_metadata -1 {1} -q:a {2} -y -nostats -loglevel 0'
    params = pattern.format(shellquote(source_path),
                            shellquote(dest_path + '.' + dest_ext), q)
    subprocess.call(ffmpeg + params, shell=True)
    print("Finished converting " + source_path)


def process_album(src, dst, cdid):
    dst_ext = 'mp3'
    q = 4
    # idk why i used glob to path, but it works only this way
    src = glob(src[:-1])[0] + "/"
    allowed_extensions = ['m4a', 'mp3', 'flac']
    for ext in allowed_extensions:
        files = src + "*." + ext
        for file in glob(files):
            filename = "".join((file.split('/')[-1]).split('.')[:-1])
            print("Starting converting " + file)
            dst_path = '{0}/CD0{1}/{2}'.format(dst, cdid, filename)
            process_song(file, dst_path, dst_ext, q)


if __name__ == '__main__':
    try:
        src = str(sh.pwd())
        dst = '/Volumes/UNTITLED/'
        # dst = sys.argv[2]
        cdid = sys.argv[1]
        process_album(src, dst, cdid)
    except Exception as e:
        print("bad input")
        print(e)
def display_rack_map():
    ####
    #
    # Debug flag: True means generate fake data with a random generator,
    # False means fetch the real data from the server.
    ####
    flag_debug = False

    # "class" is the specific class that generates the map for each service
    # type; "method" is the specific method that fetches the real data for
    # each service type (the methods are defined in class FetchClusterInfo).
    service_options = {
        "temperature": {
            "class": HeatClusterMap,
            "method": "fetch_temperature_ipmi",
        },
        "service": {
            "class": ServiceClusterMap,
            "method": "fetch_service_type",
        },
    }

    # rack denotes the rack the user selected; service denotes the service
    # the user selected on that rack
    rack = request.form['select_rack']
    service = request.form['select_service']

    # double check to make sure the rack can provide the specific service
    rack_form = RackForm()
    if rack not in rack_form.all_services_dict[service]["clusters"]:
        log.error("Someone tried to hack the service [service: '{0}' on "
                  "rack: '{1}'] provided by Rack Diagram. Just ignore it."
                  .format(service, rack))
        return redirect("/inventory/rack")

    # get location of configuration file, input diag, output image
    dir_base = config_file("")
    server_config = cm_config_server()
    relative_dir_diag = server_config.get("cloudmesh.server.rack.input")
    relative_dir_image = server_config.get(
        "cloudmesh.server.rack.diagrams.{0}".format(service))
    # log.debug("relative dir image, {0}".format(relative_dir_image))
    flask_dir = "static"
    # guess absolute path of cloudmesh_web
    rack_py_dir = pwd().strip().split("/")
    cloudmesh_web_dir = rack_py_dir  # [:-1]
    # log.debug("cloudmesh_web dir, {0}".format(cloudmesh_web_dir))
    list_image_dir = [flask_dir] + relative_dir_image.strip().split("/")
    abs_dir_image = "/".join(cloudmesh_web_dir + list_image_dir)
    abs_dir_diag = dir_base + "/" + relative_dir_diag

    # dynamically generate the image
    map_class = service_options[service]["class"](
        rack, dir_base, abs_dir_diag, abs_dir_image)

    # get cluster server data
    dict_data = None
    if flag_debug:
        dict_data = map_class.genRandomValues()
    else:
        # fetch the real data ....
        # TODO cloudmesh.hpc.proxyserver
        # should we add a field in cloudmesh.yaml for the proxy server to
        # run pbsnodes ???
        config = cm_config()
        user = config.get("cloudmesh.hpc.username")
        myfetch = FetchClusterInfo(user, "india.futuregrid.org")
        flag_filter = None if rack == "all" else rack
        # If the user wants to customize the action, they can set the
        # optional param here by calling map_class.set_optional_param(value)
        aparam = map_class.get_optional_param()
        dict_data = getattr(myfetch, service_options[service]["method"])(
            flag_filter, aparam)

    # update data
    map_class.update(dict_data)
    # plot map
    map_class.plot()

    # get image names
    filename_image = map_class.getImageFilename()
    filename_legend = map_class.getLegendFilename()
    image_size = map_class.getImageSize()
    legend_size = map_class.getImageLegendSize()
    # log.debug("legend size is: {0}".format(legend_size))
    abs_web_path_image = "/".join([""] + list_image_dir + [filename_image])
    abs_web_path_legend = "/".join([""] + list_image_dir + [filename_legend])
    img_flag = "?" + str(time.time())

    return render_template("mesh/rack/rack.html",
                           flag_home=False,
                           rack=rack,
                           imageWidth=image_size["width"],
                           imageHeight=image_size["height"],
                           legendWidth=legend_size["width"],
                           legendHeight=legend_size["height"],
                           service=service,
                           imageFilename=abs_web_path_image + img_flag,
                           legendFilename=abs_web_path_legend + img_flag)
""" <Python 2.7> """ """ sh < sudo pip install --user sh > Python makes a great scripting language. Sometimes using the standard os and subprocess libraries can be a bit of a headache. """ import sh print(sh.pwd()) print(sh.whoami()) #sh.mkdir('new_folder') #sh.touch('new_file.txt') print('-------------------------------------------------------------------') """ emoji :: https://pypi.org/project/emoji/ pip install emoji """ import emoji
#!/usr/local/bin/python3
# This python script checks every single subdirectory in the current
# directory and runs `git pull` if the subdirectory is a git repository.
import sh
# import os

# This version parses the result of ls, which is not a good idea:
# http://mywiki.wooledge.org/ParsingLs
# -----------------------------------------------------------------------------
# directories = sh.ls("-d", sh.glob("*/")).split()
# for directory in directories:
#     subdirectories = sh.ls("-a", directory).split()
#     if ".git" in subdirectories:
#         os.chdir(directory)
#         print(sh.pwd(), end="")
#         print(sh.git.pull("origin", "master"))
#         os.chdir("..")

# This version parses the result of find.
# -----------------------------------------------------------------------------
repositories = sh.find(".", "-name", ".git")
for repository in repositories:
    # the slice strips the leading "./" and the trailing "git\n",
    # turning "./foo/.git\n" into "foo/."; note that cd("..") only
    # works for repositories directly under the current directory
    sh.cd(repository[2:len(repository) - 4])
    print(sh.pwd(), end="")
    print(sh.git.pull("origin", "master"))
    sh.cd("..")
def generate_map(self, service, rack_name, refresh_flag=False):
    # The following generates the map.
    # "class" is the specific class that generates the map for each service
    # type; "method" is the specific method that fetches the real data for
    # each service type (the methods are defined in class FetchClusterInfo).
    service_options = {
        "temperature": {
            "class": HeatClusterMap,
            "method": "read_temperature_mongo",
        },
        "service": {
            "class": ServiceClusterMap,
            "method": "read_service_mongo",
        },
    }

    # update progress status
    self.get_map_progress(service)

    # get location of configuration file, input diag, output image
    dir_base = config_file("")
    server_config = cm_config_server()
    relative_dir_diag = server_config.get("cloudmesh.server.rack.input")
    relative_dir_image = server_config.get(
        "cloudmesh.server.rack.diagrams.{0}".format(service))
    # log.debug("relative dir image, {0}".format(relative_dir_image))
    flask_dir = "static"
    # guess absolute path of cloudmesh_web
    rack_py_dir = pwd().strip().split("/")
    cloudmesh_web_dir = rack_py_dir
    # log.debug("cloudmesh_web dir, {0}".format(cloudmesh_web_dir))
    list_image_dir = [flask_dir] + relative_dir_image.strip().split("/")
    abs_dir_image = "/".join(cloudmesh_web_dir + list_image_dir)
    abs_dir_diag = dir_base + "/" + relative_dir_diag

    # dynamically generate the image
    map_class = service_options[service]["class"](
        self.username, rack_name, dir_base, abs_dir_diag, abs_dir_image)

    # get cluster server data
    dict_data = None
    if False:
        dict_data = map_class.genRandomValues()
    else:
        # flag_filter = None if rack_name == "all" else rack_name
        # If the user wants to customize the action, they can set the
        # optional param here by calling map_class.set_optional_param(value)
        aparam = map_class.get_optional_param()
        dict_data = getattr(self, service_options[service]["method"])(
            rack_name, aparam, refresh_flag)

    # update data
    map_class.update(dict_data)
    # plot map
    map_class.plot()

    # get image names
    filename_image = map_class.getImageFilename()
    filename_legend = map_class.getLegendFilename()
    image_size = map_class.getImageSize()
    legend_size = map_class.getImageLegendSize()
    # log.debug("legend size is: {0}".format(legend_size))
    abs_web_path_image = "/".join([""] + list_image_dir + [filename_image])
    abs_web_path_legend = "/".join([""] + list_image_dir + [filename_legend])
    img_flag = "?" + str(time.time())

    map_data = {
        "map_width": image_size["width"],
        "map_height": image_size["height"],
        "legend_width": legend_size["width"],
        "legend_height": legend_size["height"],
        "map_url": abs_web_path_image + img_flag,
        "legend_url": abs_web_path_legend + img_flag,
    }
    self.map_progress.update_data("map_data", map_data)