def reduce_python(self):
    print("Reduce python")
    oldpwd = os.getcwd()
    try:
        print("Remove files unlikely to be used")
        os.chdir(join(self.ctx.dist_dir, "root", "python"))
        sh.rm("-rf", "share")
        sh.rm("-rf", "bin")
        os.chdir(join(self.ctx.dist_dir, "root", "python", "lib"))
        sh.rm("-rf", "pkgconfig")
        sh.rm("libpython2.7.a")
        os.chdir(join(self.ctx.dist_dir, "root", "python", "lib", "python2.7"))
        sh.find(".", "-iname", "*.pyc", "-exec", "rm", "{}", ";")
        sh.find(".", "-iname", "*.py", "-exec", "rm", "{}", ";")
        #sh.find(".", "-iname", "test*", "-exec", "rm", "-rf", "{}", ";")
        sh.rm("-rf", "wsgiref", "bsddb", "curses", "idlelib", "hotshot")
        sh.rm("-rf", sh.glob("lib*"))

        # now create the zip.
        print("Create a python27.zip")
        sh.rm("config/libpython2.7.a")
        sh.rm("config/python.o")
        sh.rm("config/config.c.in")
        sh.rm("config/makesetup")
        sh.rm("config/install-sh")
        sh.mv("config", "..")
        sh.mv("site-packages", "..")
        sh.zip("-r", "../python27.zip", sh.glob("*"))
        sh.rm("-rf", sh.glob("*"))
        sh.mv("../config", ".")
        sh.mv("../site-packages", ".")
    finally:
        os.chdir(oldpwd)
def clean():
    proj()
    print ". cleaning up build and dist"
    try:
        sh.rm("-r", sh.glob("dist/*"), sh.glob("build/*"))
    except sh.ErrorReturnCode:  # rm exits nonzero when the globs match nothing
        print ".. already clean"
def main():
    logging.debug('start')
    if YANDEX_SEARCH_ID == "":
        logging.warn(
            'to enable search on your site run\n'
            ' python3 build3.py "http://website.url/" 123\n'
            ' where 123 is yandex search id obtainable on'
            ' http://site.yandex.ru/searches/new/'
        )

    # create and clear output directory if necessary
    mkdir("-p", "_site/")
    rm("-Rf", glob("_site/*"))

    # copy static content
    cp("-a", glob("_web/*"), "_site/")
    mv("_site/dot_htaccess", "_site/.htaccess")

    # copy optimized css
    cssoptimizer(cat(glob("_css/*")), "-i", "_site/style.css")

    # copy optimized js
    uglifyjs(cat(glob("_js/*")), "-o", "_site/scripts.js")

    # generate content
    materialize_notes(SOURCE)
    materialize_template("Y_Search", "Y_Search", {"title": "Поиск"})

    logging.debug('end.')
    logging.info('To start, copy the following url into your browser: '
                 '\n%sindex.html' % BASE_URL)
def save_data(last_submitted, sim_data, sim_type):
    qstat = sh.Command("qstat")
    grep = sh.Command("grep")
    are_on_queue = True
    while are_on_queue:
        try:
            grep(qstat(), str(last_submitted))
            print "{0} submitted last still on queue," \
                  " waiting 20 min from {1}".format(last_submitted,
                                                    datetime.datetime.now().time())
            time.sleep(20 * 60)
        except sh.ErrorReturnCode_1:
            # grep exits 1 once the job id no longer appears in qstat output
            are_on_queue = False

    tar = sh.Command("tar")
    for name, tar_pattern in iterate_over_folders(sim_data,
                                                  [create_mini_tar_names],
                                                  sim_type):
        tar("-czpf", name, sh.glob(tar_pattern))
    for name, tar_pattern in iterate_over_folders(sim_data,
                                                  [create_full_tar_names],
                                                  sim_type):
        try:
            tar("-czpf", name, sh.glob(tar_pattern))
        except sh.ErrorReturnCode:
            pass
    for name, tar_pattern in iterate_over_folders(sim_data,
                                                  [create_pics_tar_names],
                                                  sim_type):
        try:
            tar("-czpf", name, sh.glob(tar_pattern))
        except sh.ErrorReturnCode:
            pass
def clean(): """clean up generated files""" proj() print ". cleaning up build and dist" try: sh.rm("-r", sh.glob("dist/*"), sh.glob("build/*")) except Exception, err: print ".. already clean: %s" % err
def reduce_python(self):
    logger.info("Reduce python")
    oldpwd = os.getcwd()
    try:
        logger.info("Remove files unlikely to be used")
        os.chdir(join(self.ctx.dist_dir, "root", "python3"))
        # os.execve("/bin/bash", ["/bin/bash"], env=os.environ)
        sh.rm("-rf", "bin", "share")

        # platform binaries and configuration
        os.chdir(join(
            self.ctx.dist_dir, "root", "python3", "lib",
            "python3.7", "config-3.7m-darwin"))
        sh.rm("libpython3.7m.a")
        sh.rm("python.o")
        sh.rm("config.c.in")
        sh.rm("makesetup")
        sh.rm("install-sh")

        # cleanup pkgconfig and compiled lib
        os.chdir(join(self.ctx.dist_dir, "root", "python3", "lib"))
        sh.rm("-rf", "pkgconfig")
        sh.rm("-f", "libpython3.7m.a")

        # cleanup python libraries
        os.chdir(join(
            self.ctx.dist_dir, "root", "python3", "lib", "python3.7"))
        sh.rm("-rf", "wsgiref", "curses", "idlelib", "lib2to3",
              "ensurepip", "turtledemo", "lib-dynload", "venv",
              "pydoc_data")
        sh.find(".", "-path", "*/test*/*", "-delete")
        sh.find(".", "-name", "*.exe", "-type", "f", "-delete")
        sh.find(".", "-name", "test*", "-type", "d", "-delete")
        sh.find(".", "-iname", "*.pyc", "-delete")
        sh.find(".", "-path", "*/__pycache__/*", "-delete")
        sh.find(".", "-name", "__pycache__", "-type", "d", "-delete")

        # now precompile to Python bytecode
        hostpython = sh.Command(self.ctx.hostpython)
        shprint(hostpython, "-m", "compileall", "-f", "-b")
        # sh.find(".", "-iname", "*.py", "-delete")

        # some pycache are recreated after compileall
        sh.find(".", "-path", "*/__pycache__/*", "-delete")
        sh.find(".", "-name", "__pycache__", "-type", "d", "-delete")

        # create the lib zip
        logger.info("Create a python3.7.zip")
        sh.mv("config-3.7m-darwin", "..")
        sh.mv("site-packages", "..")
        sh.zip("-r", "../python37.zip", sh.glob("*"))
        sh.rm("-rf", sh.glob("*"))
        sh.mv("../config-3.7m-darwin", ".")
        sh.mv("../site-packages", ".")
    finally:
        os.chdir(oldpwd)
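The `sh.Command(self.ctx.hostpython)` call above wraps an explicit executable path so it can be invoked like sh's PATH-resolved commands. A minimal standalone sketch, with a hypothetical interpreter path:

import sh

# wrap an executable by absolute path instead of PATH lookup
hostpython = sh.Command("/usr/bin/python3")  # hypothetical path
print(hostpython("-c", "print('hello from the wrapped interpreter')"))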
def deploy(name, fn=None, bucket='lambda_methods'):
    print 'Preparing lambda method:', name
    orig_dir = sh.pwd().strip()
    dirname = '{}/{}'.format(orig_dir, name)
    zip_name = '{}/{}.zip'.format(dirname, name)
    if os.path.exists(dirname):
        sh.rm('-rf', dirname)

    # cp skeleton project data
    sh.cp('-r', os.path.join(os.path.dirname(__file__), 'project'), dirname)

    base_zip = '{}/dist.zip'.format(dirname)
    if not os.path.exists(base_zip):
        _docker('--rm', '-v', '{}:/app'.format(dirname),
                'quay.io/pypa/manylinux1_x86_64', '/app/scripts/build.sh')
        sh.zip('-9', zip_name, '-j', '{}/README.md'.format(dirname))
        sh.cd(os.path.join(dirname, 'build'))
        sh.zip('-r9', zip_name, sh.glob('*'))
        sh.cd(dirname)
    else:
        sh.mv(base_zip, zip_name)

    if fn is not None:
        with open(os.path.join(dirname, 'src', 'custom.py'), 'w') as fh:
            fh.write(fn)

    sh.cp(os.path.join(dirname, 'src', 'template.py'),
          os.path.join(dirname, 'src', '{}.py'.format(name)))
    sh.cd(os.path.join(dirname, 'src'))
    sh.zip('-r9', zip_name, sh.glob('*'))
    sh.cd(orig_dir)

    def percent_cb(complete, total):
        sys.stdout.write('.')
        sys.stdout.flush()

    print 'Publishing zip file to S3', 's3://{}/{}.zip'.format(bucket, name)
    b = _s3conn.get_bucket(bucket)
    k = Key(b)
    k.key = '{}.zip'.format(name)
    k.set_contents_from_filename(zip_name, cb=percent_cb, num_cb=10)

    try:
        _lambda.delete_function(FunctionName=name)
    except Exception:
        pass  # the function may not exist yet

    b = _s3conn.get_bucket('idaho-lambda')
    for key in b.list(prefix=name):
        key.delete()

    print 'Creating function'
    code = {'S3Bucket': bucket, 'S3Key': '{}.zip'.format(name)}
    handler = '{}.handler'.format(name)
    role = 'arn:aws:iam::523345300643:role/lambda_s3_exec_role'
    _lambda.create_function(FunctionName=name, Code=code, Role=role,
                            Handler=handler, Runtime='python2.7',
                            Timeout=60, MemorySize=1024)
def clean(): """clean up generated files""" proj() print ". cleaning up build and dist" try: sh.rm("-r", sh.glob("dist/*"), sh.glob("build/*") ) except Exception, err: print ".. already clean: %s" % err
def split(self):
    # Call Roche binary #
    barcode_file = TmpFile.from_string(self.barcode_text)
    sh.sfffile("-s", "barcodes_keyword", "-mcf", barcode_file.path, self.path)
    # Check result #
    produced_files = set(sh.glob('454Reads.*.sff'))
    expected_files = set(['454Reads.%s.sff' % (sample.name.upper())
                          for sample in self.sample_links.values()])
    assert produced_files == expected_files
    # Make piece objects #
    self.pieces = [SamplePiece(p, self) for p in sh.glob('454Reads.*.sff')]
    for piece in self.pieces:
        piece.rename()
    # Cleanup #
    barcode_file.remove()
def reduce_python(self):
    logger.info("Reduce python")
    logger.info("Remove files unlikely to be used")
    with cd(join(self.ctx.dist_dir, "root", "python3")):
        sh.rm("-rf", "bin", "share")

    # platform binaries and configuration
    with cd(join(
            self.ctx.dist_dir, "root", "python3", "lib",
            "python3.8", "config-3.8-darwin")):
        sh.rm(
            "libpython3.8.a",
            "python.o",
            "config.c.in",
            "makesetup",
            "install-sh",
        )

    # cleanup pkgconfig and compiled lib
    with cd(join(self.ctx.dist_dir, "root", "python3", "lib")):
        sh.rm("-rf", "pkgconfig", "libpython3.8.a")

    # cleanup python libraries
    with cd(join(
            self.ctx.dist_dir, "root", "python3", "lib", "python3.8")):
        sh.rm("-rf", "curses", "idlelib", "lib2to3", "ensurepip",
              "turtledemo", "lib-dynload", "venv", "pydoc_data")
        sh.find(".", "-path", "*/test*/*", "-delete")
        sh.find(".", "-name", "*.exe", "-type", "f", "-delete")
        sh.find(".", "-name", "test*", "-type", "d", "-delete")
        sh.find(".", "-iname", "*.pyc", "-delete")
        sh.find(".", "-path", "*/__pycache__/*", "-delete")
        sh.find(".", "-name", "__pycache__", "-type", "d", "-delete")

        # now precompile to Python bytecode
        hostpython = sh.Command(self.ctx.hostpython)
        shprint(hostpython, "-m", "compileall", "-f", "-b")
        # sh.find(".", "-iname", "*.py", "-delete")

        # some pycache are recreated after compileall
        sh.find(".", "-path", "*/__pycache__/*", "-delete")
        sh.find(".", "-name", "__pycache__", "-type", "d", "-delete")

        # create the lib zip
        logger.info("Create a python3.8.zip")
        sh.mv("config-3.8-darwin", "..")
        sh.mv("site-packages", "..")
        sh.zip("-r", "../python38.zip", sh.glob("*"))
        sh.rm("-rf", sh.glob("*"))
        sh.mv("../config-3.8-darwin", ".")
        sh.mv("../site-packages", ".")
def func2(all):
    count = 0
    for k in range(len(all)):
        at = all[k]
        # print 'at', at
        for indexf in at:
            func(indexf)

        from ENIIGMA.GA import GA_module_pyev as gam
        Sp = DIR + 'Workspace/Processing/Store_interp/'
        sh.cp(sh.glob(DIR + 'Workspace/Processing/Interp_proc/*.dat'), Sp)

        home1 = DIR + 'Workspace/Processing/Interp_proc/'
        home2 = DIR + '/Workspace/Processing/Store_interp/'
        f = gam.gamod(new_tau, new_etau, home1, home2, Stats=St,
                      freq_stat=freqs, gen=gen, ga_min=ga_min,
                      ga_max=ga_max, mutp=mutp, popsize=popsize,
                      cc=count, fitness=fitness,
                      initializator=initializator, mutator=mutator,
                      crossover=crossover, scaling=scaling,
                      selector=selector, termination=termination)

        sh.rm(sh.glob(DIR + 'Workspace/Processing/Interp_proc/*.dat'))
        sh.rm(sh.glob(DIR + 'Workspace/Processing/Interp_proc/OutFile.txt'))

        tscore = np.loadtxt(DIR + 'Workspace/Processing/Interp_proc/'
                            + 'comb_score0.txt').T
        # pd.read_csv(DIR+'Workspace/Interp/'+'comb_score0.txt', sep='\s+', header=None)
        score = tscore[0]

        count = count + 1
        print('round', count, 'of', len(all), 'Score =', score)
def split(self):
    # Call Roche binary #
    handle = tempfile.NamedTemporaryFile(delete=False)
    handle.write(self.barcode_file)
    handle.close()
    sh.sfffile("-s", "barcodes_keyword", "-mcf", handle.name, self.path)
    # Check result #
    produced_files = set(sh.glob('454Reads.*.sff'))
    expected_files = set(['454Reads.%s.sff' % (sample.name.upper())
                          for sample in self.sample_links.values()])
    assert produced_files == expected_files
    # Make piece objects #
    self.pieces = [SamplePiece(p, self) for p in sh.glob('454Reads.*.sff')]
    for piece in self.pieces:
        piece.rename()
    os.remove(handle.name)
def testinfra(inventory,
              testinfra_dir,
              debug=False,
              env=None,
              out=print_stdout,
              err=print_stderr):
    """
    Runs testinfra against specified ansible inventory file

    :param inventory: Path to ansible inventory file
    :param testinfra_dir: Path to the testinfra tests
    :param debug: Pass debug flag to testinfra
    :param env: Environment to pass to underlying sh call
    :param out: Function to process STDOUT for underlying sh call
    :param err: Function to process STDERR for underlying sh call
    :return: sh response object
    """
    if env is None:
        # guard: the 'HOME' check below would raise TypeError on None
        env = os.environ.copy()
    kwargs = {
        '_env': env,
        '_out': out,
        '_err': err,
        'debug': debug,
        'ansible_inventory': inventory,
        'sudo': True,
        'connection': 'ansible',
        'n': 3
    }

    if 'HOME' not in kwargs['_env']:
        kwargs['_env']['HOME'] = os.path.expanduser('~')

    tests = '{}/test_*.py'.format(testinfra_dir)
    tests_glob = sh.glob(tests)

    return sh.testinfra(tests_glob, **kwargs)
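A note on the call above: `sh` converts ordinary keyword arguments into command-line options, so the `kwargs` dict becomes testinfra flags (`debug=True` emits `--debug`, `connection='ansible'` emits `--connection ansible`, the single-letter `n=3` emits `-n 3`, and underscores in long names become dashes), while underscore-prefixed keys (`_env`, `_out`, `_err`) configure sh itself. A minimal sketch of the mapping:

import sh

sh.ls(all=True)  # runs: ls --all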
def runtime_assets():
    rt_cfg = dict(
        themes=dict(
            path="lib/swatch/*.css",
            sub_data=lambda x: x.split(".")[1],
            sub_text=lambda x: x
        ),
        code_themes=dict(
            path="lib/cm/theme/*.css",
            sub_data=lambda x: os.path.basename(x)[0:-4],
            sub_text=lambda x: " ".join(x.split("-")).title()
        ),
        examples=dict(
            path="blockml/*.xml",
            sub_data=lambda x: os.path.basename(x)[0:-4],
            sub_text=lambda x: " ".join(x.split("_")).title()
        )
    )

    result = {}
    for thing, cfg in rt_cfg.items():
        result[thing] = sorted([
            (cfg["sub_text"](cfg["sub_data"](path)), cfg["sub_data"](path))
            for path in sh.glob(cfg["path"])
        ], key=lambda x: x[0].upper())
    return result
def post_syslog(self, message, response):
    output = status.tar_syslog_files(
        "/run/shm/syslog-%s.tar.gz"
        % (datetime.datetime.now().strftime("%Y%m%d%H%M")))
    headers = message.data.get("headers", {})
    r = requests.post(
        message.data["url"],
        files={output: open(output, "rb")},
        headers=headers,
        verify=False
    )
    if r.status_code != requests.codes.ok:
        return response(
            code=r.status_code,
            data={"message": "Can't upload config."}
        )

    sh.rm("-rf", sh.glob("/run/shm/syslog-*.tar.gz"))
    resp = r.json()
    if "url" not in resp:
        return response(
            code=500, data={"message": "Can't get file link."})
    return response(data={"url": resp["url"]})
def build(self):
    for dep in self.aurdeps:
        try:
            dep.build()
        except BuildError:
            print("could not build dependency %s" % (dep.name))
            return
    print("Building", self.name)
    os.chdir(self.path)
    try:
        results = sh.makepkg("-d", "--noconfirm",
                             _err="/var/log/aur_repo/%s.log" % self.name)
    except sh.ErrorReturnCode_1:
        with open("/var/log/aur_repo/%s.log" % self.name) as log:
            if "A package has already been built" in log.read():
                print("%s is up to date" % (self.name))
                return
        raise BuildError
    except sh.ErrorReturnCode_2:
        print("Error building %s, see /var/log/aur_repo/%s.log"
              % (self.name, self.name))
        raise BuildError
    for line in open("/var/log/aur_repo/%s.log" % self.name).read().split("\n"):
        if "Finished making" in line:
            tmp = line[line.find(":") + 1:].split()[1]
            self.pkg = sh.glob("%s/*%s*" % (self.path, tmp))[0]
            self.add()
def favicon(): """generate the favicon... ugly""" proj() print(". generating favicons...") sizes = [16, 32, 64, 128] tmp_file = lambda size: "/tmp/favicon-%s.png" % size for size in sizes: print("... %sx%s" % (size, size)) sh.convert( "design/logo.svg", "-resize", "%sx%s" % (size, size), tmp_file(size)) print(".. generating bundle") sh.convert( *[tmp_file(size) for size in sizes] + [ "-colors", 256, "static/img/favicon.ico" ] ) print(".. cleaning up") sh.rm(sh.glob("/tmp/favicon-*.png"))
def sh_asterisk():
    '''
    How to pass "*" as an argument to a command.
    https://stackoverflow.com/questions/32923189/how-to-pass-an-asterisk-to-module-sh-in-python
    '''
    # equivalent to the linux command: tar czf 1.tar.gz *
    sh.tar("czf", "1.tar.gz", sh.glob("*"))
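For contrast, a minimal sketch of the failure mode the snippet above avoids: `sh` executes programs directly, without a shell, so a bare `*` is never expanded.

import sh

# sh.tar("czf", "1.tar.gz", "*")    # tar would look for a file literally named "*"
files = sh.glob("*")                # expand the pattern in Python instead
sh.tar("czf", "1.tar.gz", *files)   # unpacking the list explicitly also works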
def search(self, package, path):
    '''Looks for package in files in path using zgrep shell binary.'''
    try:
        log_lines = sh.zgrep(package, glob(path))
    except sh.ErrorReturnCode_1 as e:
        # buffer overflown?? don't know why this happens when using sh.
        log_lines = e.stdout

    log_lines = log_lines.split("\n")
    # get all but the last line -> get rid of last '' empty line
    log_lines = log_lines[:-1]
    for line in log_lines:
        #logger.debug("Following line was found:\n%s" % line)
        logger.debug("Following line containing metapackage was found "
                     "in package manager's log files:")
        print(line)
        self.installation_lines.append(line)

    if not self.installation_lines:
        logger.info("zgrep could not find in logs any info that "
                    "can be used to uninstall the package. "
                    "Exiting...")
        sys.exit()
    else:
        logger.info("Search results from zgrep were collected.")
    self._check()
    return self.main_line
def asset_links(asset_type):
    template = """
    <li><a href="#%(thing)s" data-blockd3-%(thing)s="%(file)s">
        %(text)s
    </a></li>"""
    cfg = dict(
        THEMES=dict(
            path="lib/swatch/*.css",
            thing="theme",
            sub_data=lambda x: x.split(".")[1],
            sub_text=lambda x: x
        ),
        EXAMPLES=dict(
            path="blockml/*.xml",
            thing="example",
            sub_data=lambda x: os.path.basename(x)[0:-4],
            sub_text=lambda x: " ".join(x.split("_")).title()
        )
    )[asset_type]

    return "\n".join([
        template % {
            "file": cfg["sub_data"](path),
            "thing": cfg["thing"],
            "text": cfg["sub_text"](cfg["sub_data"](path))
        }
        for path in sh.glob(cfg["path"])
    ])
def testinfra(testinfra_dir,
              debug=False,
              env=os.environ.copy(),
              out=logger.warning,
              err=logger.error,
              **kwargs):
    """
    Runs testinfra against specified ansible inventory file

    :param inventory: Path to ansible inventory file
    :param testinfra_dir: Path to the testinfra tests
    :param debug: Pass debug flag to testinfra
    :param env: Environment to pass to underlying sh call
    :param out: Function to process STDOUT for underlying sh call
    :param err: Function to process STDERR for underlying sh call
    :return: sh response object
    """
    kwargs['debug'] = debug
    kwargs['_env'] = env
    kwargs['_out'] = out
    kwargs['_err'] = err

    if 'HOME' not in kwargs['_env']:
        kwargs['_env']['HOME'] = os.path.expanduser('~')

    tests = '{}/test_*.py'.format(testinfra_dir)
    tests_glob = sh.glob(tests)

    return sh.testinfra(tests_glob, **kwargs)
def build_arch(self, arch):
    with current_directory(self.get_build_dir(arch.arch)):
        env = self.get_recipe_env(arch)
        cvsrc = self.get_build_dir(arch.arch)
        lib_dir = os.path.join(self.ctx.get_python_install_dir(), "lib")

        shprint(
            sh.cmake,
            '-DP4A=ON',
            '-DANDROID_ABI={}'.format(arch.arch),
            '-DCMAKE_TOOLCHAIN_FILE={}/platforms/android/android.toolchain.cmake'.format(cvsrc),
            '-DPYTHON_INCLUDE_PATH={}/include/python2.7'.format(env['PYTHON_ROOT']),
            '-DPYTHON_LIBRARY={}/lib/libpython2.7.so'.format(env['PYTHON_ROOT']),
            '-DPYTHON_NUMPY_INCLUDE_DIR={}/numpy/core/include'.format(env['SITEPACKAGES_PATH']),
            '-DANDROID_EXECUTABLE={}/tools/android'.format(env['ANDROID_SDK']),
            '-DBUILD_TESTS=OFF',
            '-DBUILD_PERF_TESTS=OFF',
            '-DBUILD_EXAMPLES=OFF',
            '-DBUILD_ANDROID_EXAMPLES=OFF',
            '-DPYTHON_PACKAGES_PATH={}'.format(env['SITEPACKAGES_PATH']),
            cvsrc,
            _env=env)
        shprint(sh.make, '-j', str(cpu_count()), 'opencv_python')
        shprint(sh.cmake, '-DCOMPONENT=python', '-P', './cmake_install.cmake')
        sh.cp('-a', sh.glob('./lib/{}/lib*.so'.format(arch.arch)), lib_dir)
def set_current(timestamp): """ Set an app directory to the currently live app by creating a symlink as specified in config """ app_path = path.join(install_parent, timestamp) log( "Linking live path '{live}' to app dir: {app_dir}".format( app_dir=app_path, live=live_link_path ) ) run(sh.rm, live_link_path, force=True) run(sh.ln, app_path, live_link_path, symbolic=True) site_to_enable = path.join(sites_available_dir, timestamp) site_links = sh.glob(path.join(sites_enabled_dir, '*')) # Delete existing site links run(sh.rm, site_links, f=True) # Add our link into sites-enabled run(sh.ln, site_to_enable, sites_enabled_path, s=True) # Restart apache restart()
def test_piping():
    from sh import sort, du, glob, wc, ls

    # sort this directory by biggest file
    print sort(du(glob('*'), '-sb'), '-rn')

    # print the number of folders and files in /etc
    print wc(ls('/etc', '-l'), '-l')
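The nesting above is how `sh` expresses shell pipes: a finished command passed as the first argument feeds its output into the outer command's stdin. One more minimal sketch along the same lines:

from sh import ls, wc

# equivalent to: ls -1 /etc | wc -l
print(wc(ls('-1', '/etc'), '-l'))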
def modified(self): print "purging cache for {slug}".format(slug=self.slug) if s.WMS_CACHE_DB.exists(self.slug): cached_filenames = s.WMS_CACHE_DB.smembers(self.slug) for filename in cached_filenames: sh.rm('-rf', sh.glob(filename+"*")) s.WMS_CACHE_DB.srem(self.slug, cached_filenames)
def test_glob_expansion():
    # TODO: error
    import sh

    # this will not work: sh runs commands without a shell,
    # so the literal string '*.py' reaches ls unexpanded
    sh.ls('*.py')

    sh.ls(sh.glob('*.py'))
def clean_up(files, email_name, options):
    log.info("Preparing to clean up")
    mv = sh.mv
    move_tups = []
    stat_glob = options.dir + "*.stat/*"
    stat_dir_glob = options.dir + "*.stat/"
    stat_dest = options.output + "stat/"
    count_glob = options.dir + "*.genes.results"
    isoform_glob = options.dir + "*isoforms.results"
    count_dest = options.output + "counts/"

    log.info("Moving data files")
    if not options.no_bam:
        gbam_glob = options.dir + "*.genome.sorted.bam"
        gbam_dest = options.output + "bams/"
        move_tups.append((gbam_glob, gbam_dest))
    if not options.no_bam and not options.no_wig:
        wig_glob = options.output + "*.wig"
        bw_glob = options.output + "*.bw"
        vis_dest = options.output + "vis/"
        move_tups.append((wig_glob, vis_dest))
        move_tups.append((bw_glob, vis_dest))
    for glob, dest in move_tups:
        mv(sh.glob(glob), dest)
    mv(sh.glob(count_glob), count_dest)
    mv(sh.glob(isoform_glob), count_dest)
    mv(sh.glob(stat_glob), stat_dest)

    log.info("Deleting junk files")
    report_files = [files[0], files[1], log.handlers[0].baseFilename]
    subject = "RSEM/deseq2 pipeline"
    if options.exp_name != "rsem-deseq":
        subject += " - {}".format(options.exp_name)
    log.info("Sending report")
    send_report(list(options.to), subject, report_files)
    log.info("Run complete! Congratulations!")
def trigger_testbuild(repo, github_opts):
    pr_id, head_sha1, pr_branch = github_opts.split(':')

    olddir = os.getcwd()
    ptfdir = repo + ':' + github_opts
    webroot = prep_webroot(ptfdir)
    workdir = tempfile.mkdtemp()
    build_failed = False
    try:
        pkg = repo if repo.startswith("crowbar") else "crowbar-" + repo
        spec = pkg + '.spec'
        try:
            os.chdir(workdir)
            buildroot = os.path.join(os.getcwd(), 'BUILD')
            iosc('co', IBS_MAPPING[pr_branch], pkg, '-c')
            os.chdir(pkg)
            add_pr_to_checkout(repo, pr_id, head_sha1, pr_branch, spec)
            iosc('build', '--root', buildroot, '--noverify', '--noservice',
                 REPO_MAPPING[pr_branch], 'x86_64', spec, _out=sys.stdout)
        except:
            build_failed = True
            print("Build failed: " + str(sys.exc_info()[1]))
            raise
        else:
            sh.cp(
                '-p',
                sh.glob(os.path.join(buildroot, '.build.packages/RPMS/*/*.rpm')),
                webroot)
        finally:
            os.chdir(olddir)
            sh.cp('-p', os.path.join(buildroot, '.build.log'),
                  os.path.join(webroot, 'build.log'))
    finally:
        sh.sudo.rm('-rf', workdir)

    if not build_failed:
        job_parameters = ('nodenumber=2', 'networkingplugin=openvswitch')
        if repo in JOB_PARAMETERS:
            job_parameters = JOB_PARAMETERS[repo]
        jenkins_job_trigger(repo, job_parameters, github_opts,
                            CLOUDSRC[pr_branch], ptfdir)
        if repo in HA_JOB_PARAMETERS:
            job_parameters = HA_JOB_PARAMETERS[repo]
            jenkins_job_trigger(repo, job_parameters,
                                github_opts + ":hacloud=1",
                                CLOUDSRC[pr_branch], ptfdir)

    ghs_set_status(repo, head_sha1,
                   'failure' if build_failed else 'pending')
def build_arch(self, arch=None):
    junk = ['sqlite', 'ssl', 'ffi', 'crypto']
    libs_dir = self.ctx.get_libs_dir(arch.arch)
    print(sh.ls('-l', '{}'.format(libs_dir)))
    extra_libs = [sh.glob(join('{}', '*' + j + '*').format(libs_dir))
                  for j in junk]
    if not extra_libs:
        info('No junk found.')
    else:
        for libs in extra_libs:
            for lso in libs:
                warning(lso)

    python_install_dirs = glob.glob(join(self.ctx.python_installs_dir, '*'))
    for python_install in python_install_dirs:
        debug(sh.ls('-l', '{}'.format(python_install)))
        exe_files = sh.glob(join('{}', 'setuptools', '*.exe').format(python_install))
        for f in exe_files:
            print(sh.rm(f))
def assert_git_notes(hgsha1s):
    gitrepo = os.getcwd()
    sh.cd(".git/refs/notes")
    notes_refs = sh.ls(sh.glob("hg-*")).stdout.splitlines()
    sh.cd(gitrepo)
    sh.git.notes("--ref=hg", "merge", *notes_refs)
    output = sh.git.log(pretty="format:%N", notes="hg").stdout
    notes = [line for line in output.splitlines() if line]
    assert notes == hgsha1s
def execute(self, name, params, callback=None):
    self.uuid = uuid.uuid4()
    self.params = params

    # print "Opening template " + os.path.join(os.getcwd(), os.path.join('templates', self.template))
    run_script = Template(filename=os.path.join('templates', self.template))

    if not os.path.exists(self.basedir):
        os.makedirs(self.basedir)
    self.pushd(self.basedir)

    if os.path.exists(name):
        # print "Removing existing output directory: " + os.path.join(os.getcwd(), name)
        sh.rm('-fr', name)
    os.mkdir(name)
    self.pushd(name)

    # Render execution script template ...
    out_script = run_script.render(**self.params)
    # print "Launching trial in directory: " + os.getcwd()
    out_file = open(self.template, 'w')
    # ... to a file in our current directory.
    out_file.write(out_script)
    out_file.close()

    # Construct the bro argument string.
    args = []
    if self.realtime:
        args.append('--pseudo-realtime')
    if self.bare:
        args.append('-b')
    args.append('-r')
    args.append(self.capture)
    # was: map(lambda x: args.append(x), self.scripts) -- a lazy no-op on
    # Python 3; extend() performs the intended side effect on both versions
    args.extend(self.scripts)

    # print "Launching bro ... "
    process = self.bro(args, _bg=True)
    process.wait()

    std_out = open('.stdout', 'w')
    std_out.write(process.stdout)
    std_out.close()

    std_err = open('.stderr', 'w')
    std_err.write(process.stderr)
    std_err.close()

    if callback:
        callback(self)

    sh.rm('-f', sh.glob('*.log'))
    self.popd()
    self.popd()
def _collect_mitigation_data(self, sysinfo_path):
    cpu_dir = Path("/sys/devices/system/cpu/vulnerabilities")
    results = Path(sysinfo_path, "security-mitigations.txt")
    if cpu_dir.exists():
        sh.grep(
            "-Hs", ".",
            sh.glob("/sys/devices/system/cpu/vulnerabilities/*"),
            _out=str(results),
        )
    try:
        with open(results, "a+") as f:
            sh.contrib.sudo.grep("-Hs", ".",
                                 sh.glob("/sys/kernel/debug/x86/*enabled"),
                                 _out=f)
    except sh.ErrorReturnCode:
        # not enabled so ignore it, (at least on centos8)
        pass
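Both grep calls above rely on `_out`, which accepts either a filename string or an open file object for redirecting stdout. A minimal sketch of the two forms (hypothetical output path):

import sh

sh.ls("-l", _out="listing.txt")    # redirect stdout to a path
with open("listing.txt", "a") as fh:
    sh.date(_out=fh)               # or stream into an open file object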
def transcribe(self, wav_files):
    start = time.time()
    self.pool.map(preprocessing,
                  [(wav_files[i], i, self.output_path)
                   for i in range(0, len(wav_files))])
    print("Preprocessing: ", time.time() - start)

    start = time.time()
    # Extract wav2vec feature
    featureWritter = EmbeddingDatasetWriter(input_root=self.output_path,
                                            output_root=self.output_path,
                                            loaded_model=self.w2vec,
                                            extension="wav",
                                            use_feat=False)
    featureWritter.write_features()
    print("Feature extraction: ", time.time() - start)

    start = time.time()
    # Prepare dataset for speech to text
    paths = absoluteFilePaths(self.output_path)
    paths = [p for p in paths if '.h5context' in p]
    lines = []
    for p in paths:
        file_name = ntpath.basename(p).replace('.h5context', '')
        lines.append('\t'.join([file_name, p, '5', 'anh em']))
    with open(os.path.join(self.output_path, 'test.lst'), 'w') as f:
        f.write('\n'.join(lines))

    # Decoding on created dataset
    decode_res = self.decode(os.path.join(self.output_path, 'test.lst'),
                             self.output_path)
    print("Decoding: ", time.time() - start)

    trans_file = None
    for path in absoluteFilePaths(self.output_path):
        if 'test.lst.hyp' in path:
            trans_file = path
    if trans_file is None:
        print("An error occurred during decoding. Please run the following "
              "command line in a separate terminal:")
        print(decode_res)

    transcripts = read_result(trans_file)
    transcripts = list(transcripts.items())
    transcripts = sorted(transcripts, key=lambda x: x[0])
    transcripts = [t[1] for t in transcripts]
    sh.rm(sh.glob(self.output_path + '/*'))
    return transcripts
def clear_user_data(): """ Empty the user data directory """ path = os.path.join(settings.USER_DATA_DIRECTORY, "*") try: sh.rm(sh.glob(path)) except sh.ErrorReturnCode_1: # directory already empty pass
def align(self, ref_path):
    # Run it #
    sh.mothur("#align.seqs(candidate=%s, template=%s, search=blast, "
              "flip=false, processors=8);" % (self.path, ref_path))
    # Move things #
    shutil.move(self.path[:-6] + '.align', self.aligned_path)
    shutil.move(self.path[:-6] + '.align.report', self.report_path)
    shutil.move(self.path[:-6] + '.flip.accnos', self.accnos_path)
    # Clean up #
    if os.path.exists('formatdb.log'):
        os.remove('formatdb.log')
    if os.path.exists('error.log') and os.path.getsize('error.log') == 0:
        os.remove('error.log')
    for p in sh.glob('mothur.*.logfile'):
        os.remove(p)
def template_align(self, ref_path):
    # Run it #
    sh.mothur("#align.seqs(candidate=%s, template=%s, search=blast, "
              "flip=false, processors=8);" % (self.path, ref_path))
    # Move things #
    shutil.move(self.path[:-6] + '.align', self.aligned_path)
    shutil.move(self.path[:-6] + '.align.report', self.report_path)
    shutil.move(self.path[:-6] + '.flip.accnos', self.accnos_path)
    # Clean up #
    if os.path.exists('formatdb.log'):
        os.remove('formatdb.log')
    if os.path.exists('error.log') and os.path.getsize('error.log') == 0:
        os.remove('error.log')
    for p in sh.glob('mothur.*.logfile'):
        os.remove(p)
def expand_filenames(filenames):
    """ expands the filenames, resolving environment variables, ~ and globs """
    res = []
    for filename in filenames:
        filename = os.path.expandvars(os.path.expanduser(filename))
        if any((c in filename) for c in "?*["):
            res += sh.glob(filename)
        else:
            res += [filename]
    return res
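A minimal usage sketch for the helper above (hypothetical paths; assumes `$HOME` is set):

# "~/notes/*.txt" -> "$HOME/notes/*.txt" -> whatever the glob matches;
# "plain.txt" contains no ?, * or [ and passes through even if it doesn't exist
print(expand_filenames(["~/notes/*.txt", "$HOME/todo.md", "plain.txt"]))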
def build_arch(self, arch):
    with current_directory(join(self.get_build_dir(arch.arch))):
        env = self.get_recipe_env(arch)
        hostpython = sh.Command(self.ctx.hostpython)
        shprint(hostpython, 'ref10/build.py', _env=env)
        # the library could be `_crypto_sign.cpython-37m-x86_64-linux-gnu.so`
        # or simply `_crypto_sign.so` depending on the platform/distribution
        sh.cp('-a', sh.glob('_crypto_sign*.so'),
              self.ctx.get_site_packages_dir())
        self.install_python_package(arch)
def copy_assets(): """copy assets for static serving""" proj() print(". copying assets ...") copy_patterns = { "dist": ["./static/lib/jquery-1.8.3.min.js"] + sh.glob("./static/config/*.json") + sh.glob("./static/fragile-min.*"), "dist/font": sh.glob("./static/lib/awesome/font/*"), "dist/svg": sh.glob("./static/svg/*.svg"), "dist/img": sh.glob("./static/img/*.*") or [], "dist/docs/assets": sh.glob("./docs/assets/*.*") or [], } for dst, copy_files in copy_patterns.items(): if not os.path.exists(dst): sh.mkdir("-p", dst) for c_file in copy_files: print "... copying", c_file, dst sh.cp("-r", c_file, dst) wa_cache = "./dist/.webassets-cache" if os.path.exists(wa_cache): sh.rm("-r", wa_cache)
def create_data_dir(repo, config):
    """Build Quickie data directory (runtimes + html) if none has already
    been created"""
    try:
        data_dir = os.path.join(repo, config.get('data_dir', '.quickiedata'))
        sh.mkdir('-p', data_dir)
        config['data_dir_path'] = data_dir
        dir, _ = os.path.split(__file__)
        template = os.path.abspath(os.path.join(dir, "data"))
        # Don't overwrite the data file if it's already created.
        sh.cp('--no-clobber', template + '/data.json', data_dir)
        sh.cp(sh.glob(template + '/*.html'), data_dir)
        sh.cp(sh.glob(template + '/*.js'), data_dir)
        sh.cp(sh.glob(template + '/*.css'), data_dir)
    except sh.ErrorReturnCode as e:
        fatal("Couldn't create data directory:" + str(e))
def trigger_testbuild(repo, github_opts):
    pr_id, head_sha1, pr_branch = github_opts.split(':')

    olddir = os.getcwd()
    workdir = tempfile.mkdtemp()
    build_failed = False
    try:
        ptfdir = repo + ':' + github_opts
        webroot = os.path.join(htdocs_dir, ptfdir)
        if "crowbar" in repo:
            pkg = repo
        else:
            pkg = "crowbar-" + repo
        spec = pkg + '.spec'
        sh.rm('-rf', webroot)
        sh.mkdir('-p', webroot)
        try:
            os.chdir(workdir)
            buildroot = os.path.join(os.getcwd(), 'BUILD')
            iosc('co', IBS_MAPPING[pr_branch], pkg, '-c')
            os.chdir(pkg)
            add_pr_to_checkout(repo, pr_id, head_sha1, pr_branch, spec)
            repository = 'SLE_12' if pr_branch == 'master' else 'SLE_11_SP3'
            iosc('build', '--root', buildroot, '--noverify', '--noservice',
                 repository, 'x86_64', spec, _out=sys.stdout)
        except:
            build_failed = True
            print("Build failed: " + str(sys.exc_info()[0]))
            raise
        else:
            sh.cp(
                '-p',
                sh.glob(os.path.join(buildroot, '.build.packages/RPMS/*/*.rpm')),
                webroot)
        finally:
            os.chdir(olddir)
            sh.cp('-p', os.path.join(buildroot, '.build.log'),
                  os.path.join(webroot, 'build.log'))
    finally:
        sh.sudo.rm('-rf', workdir)

    if not build_failed:
        jenkins_job_trigger(repo, github_opts, CLOUDSRC[pr_branch], ptfdir)

    ghs_set_status(repo, pr_id, head_sha1,
                   'failure' if build_failed else 'pending')
def doComponentCopy():
    options = setup()
    print('Doing component copy: %s' % options.component)
    cname, cver = options.component.split('-')
    ecoList = options.dest.split(',')
    for eco in ecoList:
        destEco = eco[0]
        destOS = eco[1:]
        pattern = '*%s*%s*' % (cname, cver)
        for operSys in destOS:
            print('Copying %s-%s to Ecosystem %s.x -> %s'
                  % (cname, cver, destEco, OS_LIST[operSys]))
            # Remove existing files of the same component/version
            WORKDIR = '%s/%s' % (ECO_DIR_DICT[destEco],
                                 PACKAGETYPE_DICT[OS_LIST[operSys]])
            os.chdir(WORKDIR)
            rm('-fv', sh.glob(pattern), _out=log)
            # Rsync over the new files
            RSYNC_SRC = '%s/%s/%s' % (RELEASE_DIR, PKG_DICT[operSys], pattern)
            rsync(sh.glob('%s' % RSYNC_SRC), '.')
    return 0
def main():
    bodhi_cmds = []
    args = config()
    spec = sh.glob("*.spec")[0]
    nevr = sh.rpmspec(q=spec).split()[0].rsplit('.', 2)[0]
    #passwd = getpass.getpass("Enter password:")
    print "Processing %r" % nevr
    branch_keys = args.branches.split(',')
    for branch_key in branch_keys:
        print "Handling %r" % branch_key
        branch = [b for b in branches if b['short'] == branch_key][0]
        nevra = nevr + '.' + branch['long']
        print "Working on %r, %r" % (branch['short'], nevra)
        p = sh.git.checkout(branch['short'], **io)
        p.wait()

        # Merge, push, build
        p = sh.git.merge("master", **io)
        p.wait()

        # This is ridiculous
        #p = sh.fedpkg.push(**io)
        #p.wait()
        os.system("git push --set-upstream origin " + branch['short'])
        time.sleep(3)

        if not args.skip:
            if args.forgive:
                try:
                    p = sh.fedpkg.build(**io)
                    p.wait()
                except Exception, e:
                    print "*" * 30
                    print str(e)
                    print "*" * 30
            else:
                p = sh.fedpkg.build(**io)
                p.wait()

        cmd = "bodhi %s --new --user %s --type %s --notes \"%s\"" % (
            nevra, args.user, args.type, args.notes,
        )
        if args.bugs:
            cmd += " -b %s" % args.bugs
        print cmd
        #foo = raw_input("Create a bodhi update in another window... (continue)")
        bodhi_cmds.append(cmd)
def doInstallerCopy():
    options = setup()

    # Put mapr-setup.sh at the root of the installer folder
    output = rsync(RSYNC_EXTRA_OPTS,
                   '%s/yum/qa/%s/mapr-setup.sh' % (RELEASE_DIR, INSTALLER_VER),
                   '%s/installer/' % BASE_DIR)

    # Redhat
    RSYNC_SRC = '%s/yum/qa/%s/mapr-*' % (RELEASE_DIR, INSTALLER_VER)
    output = rsync(RSYNC_EXTRA_OPTS, '--delete', sh.glob(RSYNC_SRC),
                   '%s/installer/redhat/' % BASE_DIR)

    # Ubuntu
    RSYNC_SRC = '%s/qa/%s/mapr-*' % (RELEASE_DIR, INSTALLER_VER)
    output = rsync(RSYNC_EXTRA_OPTS, sh.glob(RSYNC_SRC),
                   '%s/installer/ubuntu/' % BASE_DIR)
    RSYNC_SRC = '%s/qa/%s/dists/binary/mapr-*' % (RELEASE_DIR, INSTALLER_VER)
    output = rsync(RSYNC_EXTRA_OPTS, '--delete', sh.glob(RSYNC_SRC),
                   '%s/installer/ubuntu/dists/binary/' % BASE_DIR)

    print 'Reindexing Installer redhat and ubuntu directories'
    doInstallerReindex()
def run(self):
    key_file = 'data/key-{}'.format(self.essid)
    while not os.path.exists(key_file) and not self._terminated:
        self._process = aircrack_ng(
            '-q',
            '-l', key_file,
            '-e', self.essid,
            glob('data/*.cap'),
            _err=lambda l: 0
        )
        logger.debug('Not enough data to crack the key, '
                     'trying again in %s seconds',
                     settings.AIRCRACK_DELAY_BETWEEN_TRIES)
        time.sleep(settings.AIRCRACK_DELAY_BETWEEN_TRIES)

    with open(key_file, 'r') as f:
        self.key = f.read()
def main():
    if os.getuid() != 0:
        raise Exception('Need to be root')

    from sh import aircrack_ng, airodump_ng, aireplay_ng, iwconfig, ifconfig

    parser = ArgumentParser()
    parser.add_argument('-i', '--use-if', default='',
                        help='List of comma separated interfaces the script '
                             'can use. If empty all interfaces are used. '
                             'Default: empty.')
    args = parser.parse_args()
    exclude_interface = [dev.strip() for dev in args.use_if.split(',') if dev]

    logging.debug('Cleaning up previous data')
    rm(glob('data/*'), _ok_code=[0, 1])

    while True:
        start(exclude_interface)
def analyze_results(scenarii):
    cd(BUILD_DIR)
    return  # early return: the plotting below is currently disabled

    to_plot = []
    seismo_glob = "{}/inout/OUTPUT_FILES/*.ascii".format(VERSIONS_TO_TEST[0])
    to_plot.append("set terminal pdf")
    to_plot.append("set output 'out.pdf'")
    for seismo in sh.glob(seismo_glob):
        plot = "plot "
        for i, version in enumerate(VERSIONS_TO_TEST):
            right_seismo = seismo.replace(VERSIONS_TO_TEST[0], version)
            plot += "'{}' using 1:2 {}".format(
                right_seismo,
                "" if i == len(VERSIONS_TO_TEST) - 1 else ", ")
        to_plot.append(plot)
    to_plot.append("quit")
    plotter = gnuplot(_in=[p + "\n" for p in to_plot])
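The final `gnuplot(_in=...)` call shows that `_in` accepts any iterable of lines (or a plain string) as the command's stdin. A minimal sketch:

import sh

# feed three lines to sort via stdin; prints a, b, c in order
print(sh.sort(_in=["b\n", "a\n", "c\n"]))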
def favicon():
    proj()
    print(". generating favicons...")
    sizes = [16, 32, 64, 128]
    tmp_file = lambda size: "/tmp/favicon-%s.png" % size

    for size in sizes:
        print("... %sx%s" % (size, size))
        sh.convert("svg/logo.svg", "-resize", "%sx%s" % (size, size),
                   tmp_file(size))

    print(".. generating bundle")
    sh.convert(*[tmp_file(size) for size in sizes]
               + ["-colors", 256, "dist/favicon.ico"])

    print(".. cleaning up")
    sh.rm(sh.glob("/tmp/favicon-*.png"))
def delete_empty_directories(self):
    """ Delete all the empty directories inside one """
    self.logger.log(logging.INFO, "> Deleting empty directories...")
    # get the list of all empty folders in the repository
    try:
        s_list_empty_dir = sh.find(sh.glob(self.s_repository_path + '/*'),
                                   '-type', 'd', '-empty')
        a_list_empty_dir = s_list_empty_dir.split('\n')
        # browse the list and delete every empty directory
        for sDirectory in a_list_empty_dir:
            if os.path.isdir(sDirectory):
                os.removedirs(sDirectory)
    except sh.ErrorReturnCode_1:
        self.logger.log(
            logging.INFO,
            "There are no subdirectories yet, so there are no"
            " empty directories to delete.")