def compile(self, source_dir, build_dir, install_dir):
    """Run the package's resource-copy script, then configure and build it with CMake."""
    src = os.path.join(source_dir, self.dirname)
    assert os.path.exists(src)
    bld = os.path.join(build_dir, self.dirname)
    sh.cd(os.path.join(src, 'scripts/Resources'))
    sh.sh('./copyresources.sh')
    # the install target doesn't copy the stuff that copyresources.sh puts in place
    sh.cp('-v', os.path.join(src, 'bin/Release/Readme.txt'),
          os.path.join(install_dir, 'Readme.meshy.txt'))
    sh.cp('-v', '-r', os.path.join(src, 'bin/Release_Linux/Resources/'), install_dir)
    sh.mkdir('-p', bld)
    sh.cd(bld)
    if platform.system() == 'Darwin':
        sh.cmake('-G', 'Xcode',
                 '-D', 'CMAKE_INSTALL_PREFIX=%s' % install_dir,
                 '-D', 'CMAKE_MODULE_PATH=%s' % os.path.join(install_dir, 'CMake'),
                 src, _out=sys.stdout)
        sh.xcodebuild('-configuration', 'Release', _out=sys.stdout)
    else:
        sh.cmake('-D', 'CMAKE_INSTALL_PREFIX=%s' % install_dir,
                 '-D', 'CMAKE_MODULE_PATH=%s' % os.path.join(install_dir, 'lib/OGRE/cmake'),
                 src, _out=sys.stdout)
        sh.make('-j4', 'VERBOSE=1', _out=sys.stdout)
        sh.make.install(_out=sys.stdout)
def run(program_name, version, location_path):
    '''Run the given program name and version where the specified program's
    parent directory is located at the location path'''
    program_index = version[0]
    program_type = version[1]
    core.vuls_utils.error_checking(program_name, program_index, program_type)
    # Eventually this needs to be a regex to get the version num from the
    # prefix of the version code
    bug_index = program_index
    bug_entry = core.vuls_utils.PROG_INFO[program_name.upper()][bug_index]
    run_cmd = bug_entry['run_cmd']
    total_run_path = os.path.join(location_path, bug_entry['run_subdir'])
    try:
        sh.cd(total_run_path)
    except sh.ErrorReturnCode:
        print("No file found at: {}".format(total_run_path))
        print("Check parent directory: {}".format(location_path))
        exit(1)
    print("{} version {}".format(program_name, version))
    print("working directory: {}".format(total_run_path))
    print("Running: {}".format(run_cmd))
    # TODO add flag to run with recording
    os.system(run_cmd)
    input('Press enter to shut down tomcat')  # pause until the user is done
    if program_name.lower() == 'tomcat80':
        # FIX: the original bound a *local* tomcat_path, which the
        # atexit-registered stop_tomcat() could never see.  Declare it global
        # so the cleanup hook gets the path -- confirm stop_tomcat reads this
        # module-level name.
        global tomcat_path
        tomcat_path = total_run_path
        atexit.register(stop_tomcat)
def setUp(self):
    """Create a temp working dir holding a mock coveralls config file."""
    self.dir = tempfile.mkdtemp()
    sh.cd(self.dir)
    with open('.coveralls.mock', 'w+') as cfg:
        cfg.write('repo_token: xxx\n')
        cfg.write('service_name: jenkins\n')
def git_clone_to_local(dest_directory, webhook_data):
    """Clone (or update) the repo named in *webhook_data* under *dest_directory*
    and check out the pushed branch."""
    git = sh.git.bake()
    logger.debug('Making destination directory %s' % dest_directory)
    print('Making destination directory %s' % dest_directory)
    sh.mkdir('-p', dest_directory)
    sh.cd(dest_directory)
    logger.debug("checking for repo_name %s in %s" % (webhook_data.repo_name, sh.pwd()))
    if not os.path.exists(webhook_data.repo_name):
        logger.debug("Cloning new repository")
        print(git.clone(webhook_data.repo_url, webhook_data.repo_name))
    sh.cd(webhook_data.repo_name)
    print(git.fetch('--all'))
    try:
        git('show-ref', '--heads', webhook_data.branch_name)
        branch_exists = True
    except sh.ErrorReturnCode:
        # FIX: was a bare `except:`; show-ref exits non-zero when the branch
        # has no local head, which sh surfaces as ErrorReturnCode.
        branch_exists = False
    if branch_exists is False and not webhook_data.is_tag():
        print(git.checkout('-b', webhook_data.branch_name,
                           'origin/%s' % webhook_data.branch_name))
    elif branch_exists:
        git.checkout(webhook_data.branch_name)
        print(git.pull())
    # FIX: was a Python 2 print statement; format keeps identical output on 2 and 3.
    print('{} {}'.format(webhook_data.before, webhook_data.after))
def doInstallerReindex():
    """Rebuild RedHat repo metadata, then refresh the Ubuntu archive index."""
    sh.cd('%s/installer/redhat/' % (BASE_DIR))
    sh.createrepo('.')
    sh.cd('%s/installer/ubuntu' % (BASE_DIR))
    updater = sh.Command('./update-archive.sh')
    updater('.')
    return True
def test_run(cli):
    """`dotenv run` must expose keys from the .env file to the child process."""
    with cli.isolated_filesystem():
        sh.touch(dotenv_path)
        sh.cd(here)
        dotenv.set_key(dotenv_path, 'FOO', 'BAR')
        assert sh.dotenv('run', 'printenv', 'FOO').strip() == 'BAR'
def BuildYcmdLibs(cmake_args):
    """Configure and build ycmd's native libraries in a throwaway temp dir,
    cleaning up no matter what."""
    build_dir = unicode(sh.mktemp('-d', '-t', 'ycm_build.XXXXXX')).strip()
    try:
        cmake_invocation = ['-G', 'Unix Makefiles']
        if OnMac():
            cmake_invocation.extend(CustomPythonCmakeArgs())
        cmake_invocation.extend(cmake_args)
        cmake_invocation.append(p.join(DIR_OF_THIS_SCRIPT, 'cpp'))
        sh.cd(build_dir)
        sh.cmake(*cmake_invocation, _out=sys.stdout)
        # Under YCM_TESTRUN we build (and then run) the test target instead.
        target = ('ycm_support_libs'
                  if 'YCM_TESTRUN' not in os.environ else 'ycm_core_tests')
        sh.make('-j', NumCores(), target, _out=sys.stdout, _err=sys.stderr)
        if 'YCM_TESTRUN' in os.environ:
            RunYcmdTests(build_dir)
    finally:
        # Always hop back out and remove the temp build tree.
        sh.cd(DIR_OF_THIS_SCRIPT)
        sh.rm('-rf', build_dir)
def getfiles(userpath):
    # Return a flat list of file paths under *userpath* (recursively), or the
    # path itself (as a one-element list) when it names a single file.
    # NOTE: Python 2 code (print statement, xrange, iterator .next()).
    filepath=[]
    userpath = os.path.abspath(userpath)
    contents=os.walk(userpath)
    temp = contents
    # Materializing the walk exhausts the iterator; used only to test emptiness.
    temp_list=list(temp)
    if len(temp_list)==0:
        #This means that either the path points to a single file or a non-existent file/folder.
        try:
            with open(userpath) as f:
                pass
            return userpath.split()
            #Applied split function to convert the string to a list.
        except IOError:
            print 'Invalid path.'
            sys.exit()
    # Re-create the walk since the previous iterator was consumed above.
    contents=os.walk(userpath)
    raw_files = contents.next()
    # `ls -R` prints "dir:" headers followed by that dir's entries.
    files = sh.ls(str(raw_files[0]), '-R')
    files = str(files).split()
    ff = []
    for i in xrange(len(files)):
        if files[i][-1] == ':':
            # A "path:" header line -- remember the current folder.
            folder = files[i][:-1]
            continue
        try:
            # cd succeeds for directories; OSError means it's a regular file.
            sh.cd(folder + '/' + files[i])
            continue
        except OSError:
            ff.append(folder + '/' + files[i])
    return ff
def test_push_conflict_default(git_dir, hg_repo):
    """A git push racing a newer hg commit must be rejected as non-fast-forward."""
    git_repo = clone_repo(git_dir, hg_repo)
    sh.cd(hg_repo)
    make_hg_commit("b")
    sh.cd(git_repo)
    make_git_commit("c")
    push_stderr = sh.git.push(_ok_code=1).stderr
    assert push_stderr.find("master -> master (non-fast-forward)") > 0
def give_user_ztanesh(unix_user):
    """
    Make sure our UNIX user runs ZtaneSH shell it is more productive
    to work with Plone sites.

    https://github.com/miohtama/ztanesh
    """
    from sh import git
    from sh import chsh
    home = get_unix_user_home(unix_user)
    # Install ZtaneSH
    if not os.path.exists("%s/tools" % home):
        # FIX: was a Python 2 print statement; the call form works on 2 and 3.
        print("Installing ZtaneSH for user %s" % unix_user)
        with sudo(i=True, u=unix_user, _with=True):
            cd(home)
            git("clone", "git://github.com/miohtama/ztanesh.git", "tools")
            setup = "%s/tools/zsh-scripts/setup.zsh" % home
            run = Command(setup)
            run()
    # Set user default shell
    with sudo:
        chsh("-s", "/bin/zsh", unix_user)
def profile(self, profile_settings):
    """Profile every revision of the repo: check out each hash, build it
    (best effort), profile it, and collect results in self.profiles_by_hash."""
    # cd into repo
    sh.cd(self.repo.name())
    # create the initial profiler
    profiler = get_profiler(profile_settings, self.verbose)
    # determine how to traverse hashes
    iter = get_hash_iterator(traversal_type=profile_settings['traversal'])
    # do the traversal
    self.profiles_by_hash = {}
    for git_hash in iter(self.repo.get_hashes()):
        # first checkout this version of the code
        self.repo.checkout_hash(git_hash)
        # then build
        try:
            self.build()
        except Exception:
            # FIX: was a bare `except:`, which also swallowed
            # KeyboardInterrupt/SystemExit.  A failed build is reported and
            # the hash is still profiled (deliberate best-effort).
            vprint(self.verbose, 'Failed to build for hash \'{}\''.format(git_hash))
        # now do the profiling
        prof = profiler.profile(self.repo)
        self.profiles_by_hash[git_hash] = prof
    # step back out of repo
    sh.cd('..')
def setUp(self):
    """Build a throwaway git repo with three tagged releases, then prepare
    the client and a valid request payload."""
    super(TestServerOk, self).setUp()
    self.dir = tempfile.mkdtemp()
    sh.cd(self.dir)
    sh.git.init()
    sh.git('config', 'user.name', '"Guido"')
    sh.git('config', 'user.email', '"*****@*****.**"')
    # Release 1: initial commit.
    sh.touch('README')
    sh.git.add('.')
    sh.git.commit('-am', 'first commit')
    sh.git.tag('-a', 'jenkins-release-1', '-m', 'Release 1')
    # Release 2: tagged with a pinned committer date.
    sh.touch('file1')
    sh.git.add('.')
    sh.git.commit('-am', 'second commit #777 #123')
    sh.git.tag('-a', 'jenkins-release-2', '-m', 'Release 2',
               _env={"GIT_COMMITTER_DATE": "2006-04-07T22:13:13"})
    # Release 3.
    sh.touch('file2')
    sh.git.add('.')
    sh.git.commit('-am', '#67 third commit')
    sh.git.tag('-a', 'jenkins-release-3', '-m', 'Release 3')
    self.prepare_client()
    self.valid_data = {
        'build_number': '42',
        'build_tag': 'jenkins-release-2',
        'previous_tag': 'jenkins-release-1',
        'job_url': 'http://jenkins_url/jobs/2/',
        'repo': self.dir,
        'instance': 'TestServer',
    }
def main():
    """Show git status and, after confirmation, hard-reset, clean and pull.

    An optional single CLI argument selects the directory to operate on.
    """
    if len(sys.argv) == 2:
        sh.cd(sys.argv[1])
    # The original duplicated this whole body in both branches of the argv
    # check; only the cd differed, so it is hoisted above.
    print(sh.git("status"))
    print("(Y/n): Are you sure you want to reset this directory?")
    temp = str(input("Local changes will be deleted: "))
    if temp == "y" or temp == "Y":
        print(sh.git.reset("--hard", "HEAD"))
        print(sh.git.clean("-f"))
        # FIX: `print(sh.git.pull)` printed the bound command object and
        # never actually pulled; invoke it.
        print(sh.git.pull())
        print(sh.git("status"))
    else:
        sys.exit(0)
def redeployall(directory): print("Redeploying") # Check if the RSS configuration file exists if isfile("/etc/default/rss/database.properties"): rm("/etc/default/rss/database.properties") cp(rss.get("url").split("/")[-1][:-4] + "/properties/database.properties", "/etc/default/rss/database.properties") # Setting RSS database configuration with open("/etc/default/rss/database.properties") as f: text = f.read() text = text.replace("database.url=jdbc:mysql://localhost:3306/RSS", "database.url=jdbc:mysql://{}:{}/{}".format(DBHOST, DBPORT, rss.get('bbdd')))\ .replace("database.username=root", "database.username={}".format(DBUSER))\ .replace("database.password=root", "database.password={}".format(DBPWD)) with open("/etc/default/rss/database.properties", "w+") as f: f.write(text) if not isfile("/etc/default/rss/oauth.properties"): cp(rss.get("url").split("/")[-1][:-4] + "/properties/oauth.properties", "/etc/default/rss/oauth.properties") for api in get_apis_default(directory, APIS + [rss]): url = api.get("url") name = url.split("/")[-1][:-4] print("cd {}".format(name)) cd(name) print("Redeploy {}".format(api.get("war"))) deploy(api.get("war"), api.get("root"), True) print("End deploy") cd("..")
def gitrepo(root):
    '''Construct a dictionary holding all the Git data that can be found.'''
    previous_dir = sh.pwd().strip()
    sh.cd(root)
    # One formatted log line, split into at most 8 fields.
    head = sh.git('--no-pager', 'log', '-1', pretty="format:%s" % FORMAT).split('\n', 7)
    branch = (os.environ.get('CIRCLE_BRANCH') or
              os.environ.get('TRAVIS_BRANCH',
                             sh.git('rev-parse', '--abbrev-ref', 'HEAD').strip()))
    # Keep only the "(fetch)" lines of `git remote -v`.
    fetch_remotes = [line.split()
                     for line in sh.git.remote('-v').strip().splitlines()
                     if line.endswith('(fetch)')]
    sh.cd(previous_dir)
    return {
        "head": {
            "id": head[0],
            "author_name": head[1],
            "author_email": head[2],
            "author_timestamp": head[3],
            "committer_name": head[4],
            "committer_email": head[5],
            "committer_timestamp": head[6],
            "message": head[7].strip(),
        },
        "branch": branch,
        "remotes": [{'name': r[0], 'url': r[1]} for r in fetch_remotes],
    }
def test_git_dir_from_subdir(self):
    """git_dir() must resolve to the repo root even from a child directory."""
    sh.git('init')
    sh.mkdir('foo')
    repo_git_dir = os.path.join(os.getcwd(), '.git')
    sh.cd('foo')
    self.assertEqual(repo_git_dir, git_dir())
def repo_version(repo, version_type='patch', write_version=False):
    """Return (current_version, next_version, metadata_updated) for *repo*,
    optionally persisting the bump back into metadata.yaml."""
    assert version_type in ('patch', 'minor', 'major')
    sh.cd(os.path.join(GITENBERG_DIR, repo))
    metadata_path = os.path.join(GITENBERG_DIR, repo, "metadata.yaml")
    # Guard clause: nothing to version without metadata.
    if not os.path.exists(metadata_path):
        return (None, None, False)
    md = metadata.pandata.Pandata(metadata_path)
    current = md.metadata.get("_version")
    bump = getattr(semantic_version.Version(current), "next_{}".format(version_type))
    bumped = unicode(bump())
    metadata_updated = False
    if write_version:
        md.metadata["_version"] = bumped
        with open(metadata_path, 'w') as f:
            f.write(yaml.safe_dump(md.metadata, default_flow_style=False, allow_unicode=True))
        metadata_updated = True
    return (current, bumped, metadata_updated)
def main():
    """Show git status and, after confirmation, hard-reset, clean and pull.

    An optional single CLI argument selects the directory to operate on.
    """
    if len(sys.argv) == 2:
        sh.cd(sys.argv[1])
    # The original duplicated this whole body in both branches of the argv
    # check; only the cd differed, so it is hoisted above.
    print(sh.git("status"))
    print("(Y/n): Are you sure you want to reset this directory?")
    temp = str(input("Local changes will be deleted: "))
    if temp == "y" or temp == "Y":
        print(sh.git.reset("--hard", "HEAD"))
        print(sh.git.clean("-f"))
        # FIX: `print(sh.git.pull)` printed the bound command object and
        # never actually pulled; invoke it.
        print(sh.git.pull())
        print(sh.git("status"))
    else:
        sys.exit(0)
def checkout(self):
    # Ensure this repo is checked out under the parent's checkout dir
    # (cloning on first use), cd into it, and return its path object.
    repo_co_dir = self.parent.checkout_dir[self.name]
    if not repo_co_dir.exists():
        # NOTE(review): unary `+` on these path objects appears to coerce
        # them for cd() -- confirm against the path type's __pos__.
        cd(+self.parent.checkout_dir)
        call([self.vcs, "clone", self.url, self.name])
    cd(+repo_co_dir)
    return repo_co_dir
def setUp(self):
    """Each test runs inside a fresh temp git repo with a mocked powerline."""
    self.powerline = mock.MagicMock()
    self.dirname = tempfile.mkdtemp()
    sh.cd(self.dirname)
    sh.git("init", ".")
    self.segment = git_stash.Segment(self.powerline, {})
def check_repositories(config):
    """Build dists for every configured repository, recording the private
    ones, and restore the working directory afterwards."""
    original_cwd = os.getcwd()
    config.checkout_dir.makedirs(0o755, exist_ok=True)
    for repo in config.repositories:
        if repo.private:
            config.pvt_pkgs.add(repo.name)
        repo.build_dist_for()
    cd(original_cwd)
def repo_version(repo, version_type='patch', write_version=False):
    """Compute the current and bumped semantic version for *repo*.

    Returns (current, next, metadata_updated); (None, None, False) when the
    repo has no metadata.yaml.  With write_version=True the bumped version is
    written back to metadata.yaml.
    """
    assert version_type in ('patch', 'minor', 'major')
    sh.cd(os.path.join(GITENBERG_DIR, repo))
    metadata_path = os.path.join(GITENBERG_DIR, repo, "metadata.yaml")
    if not os.path.exists(metadata_path):
        return (None, None, False)
    md = metadata.pandata.Pandata(metadata_path)
    _version = md.metadata.get("_version")
    bump_method = "next_{}".format(version_type)
    _next_version = unicode(getattr(semantic_version.Version(_version), bump_method)())
    metadata_updated = False
    if write_version:
        md.metadata["_version"] = _next_version
        with open(metadata_path, 'w') as f:
            f.write(yaml.safe_dump(md.metadata,
                                   default_flow_style=False,
                                   allow_unicode=True))
        metadata_updated = True
    return (_version, _next_version, metadata_updated)
def testCamlistoreFileWriteAndCamputCompatibility(self):
    # Verify FileWriter chunks a file into the same blobs `camput file`
    # produced: after camput has uploaded everything, camlipy should have to
    # re-upload (almost) nothing.
    # Create a ~1.5 MiB random file (the old comment said 1MB; the code
    # writes 1.5 * 1 MiB).
    test_file = tempfile.NamedTemporaryFile()
    test_file.write(os.urandom(int(1.5 * (1024 << 10))))
    test_file.seek(0)
    log.debug('Random file generated')
    log.info('Putting file with camput file:')
    old_pwd = os.getcwd()
    sh.cd(CAMLIPY_CAMLISTORE_PATH)
    camput_blobref = sh.devcam('put', 'file', test_file.name)
    sh.cd(old_pwd)
    log.info('Camput blobRef: {0}'.format(camput_blobref))
    file_writer = FileWriter(self.server, fileobj=test_file)
    file_writer.chunk()
    camplipy_blobref = file_writer.bytes_writer()
    log.info('Camlipy blobRef: {0}'.format(camplipy_blobref))
    log.info('FileWriter cnt={0}'.format(file_writer.cnt))
    # Check that no data blob has been uploaded,
    # since we just uploaded the same file with camput file.
    # "<= 1" since sometimes, camlipy make a slightly bigger end blob.
    self.assertTrue(file_writer.cnt['uploaded'] <= 1)
def new_travis_template(repo, template, write_template=False):
    """
    compute (and optionally write) .travis.yml based on the template
    and current metadata.yaml
    """
    template_written = False
    sh.cd(os.path.join(GITENBERG_DIR, repo))
    metadata_path = os.path.join(GITENBERG_DIR, repo, "metadata.yaml")
    travis_path = os.path.join(GITENBERG_DIR, repo, ".travis.yml")
    travis_api_key_path = os.path.join(GITENBERG_DIR, repo, ".travis.deploy.api_key.txt")
    md = metadata.pandata.Pandata(metadata_path)
    epub_title = slugify(md.metadata.get("title"))
    # FIX: close the key file instead of leaking the handle
    # (was `open(...).read().strip()`).
    with open(travis_api_key_path) as key_file:
        encrypted_key = key_file.read().strip()
    repo_name = md.metadata.get("_repo")
    template_vars = {
        'epub_title': epub_title,
        'encrypted_key': encrypted_key,
        'repo_name': repo_name,
        'repo_owner': 'GITenberg',
    }
    template_result = template.render(**template_vars)
    if write_template:
        with open(travis_path, "w") as f:
            f.write(template_result)
        template_written = True
    return (template_result, template_written)
def __exit__(self, type, value, traceback):
    """Leave the context: go back to the saved cwd, then drop the temp dir."""
    # cd out first -- the temp dir may be the current working directory.
    sh.cd(self.old_cwd)
    logger.info("Back to %s", self.old_cwd)
    shutil.rmtree(self.tmpd)
def RunYcmdTests(build_dir):
    """Run the compiled ycm_core_tests binary with the freshly built libs."""
    tests_dir = p.join(build_dir, 'ycm/tests')
    sh.cd(tests_dir)
    test_env = os.environ.copy()
    test_env['LD_LIBRARY_PATH'] = DIR_OF_THIS_SCRIPT
    sh.Command(p.join(tests_dir, 'ycm_core_tests'))(_env=test_env, _out=sys.stdout)
def ensure_syncer_dir():
    """Clone the user's syncer repo if absent and scaffold its basic layout."""
    if path.isdir(syncer_dir):
        return
    # NOTE(review): the credential-prompt lines were corrupted in the source
    # (`input('GitHub username: '******...`); reconstructed below.  getpass
    # keeps the password off the terminal.
    from getpass import getpass
    username = input('GitHub username: ')
    password = getpass('GitHub password: ')
    sh.git('clone',
           'https://%s:%s@github.com/%s/%s.git' % (username, password,
                                                   username, SYNCER_REPO_NAME),
           syncer_dir)
    needs_commit = False
    sh.cd(syncer_dir)
    if not path.isfile('manifest.json'):
        sh.touch('manifest.json')
    if not path.isdir('content'):
        sh.mkdir('content')
    if not path.isdir('backup'):
        sh.mkdir('backup')
    if not path.isfile('.gitignore'):
        needs_commit = True
        with open('.gitignore', 'w') as gitignore_file:
            gitignore_file.write('backup')
    if needs_commit:
        sh.git('add', '-A')
        sh.git('commit', '-m', 'Setting up scaffolding.')
def git_code(self):
    """Clone the deploy branch into the workspace and build it with Maven.

    Returns True early for rollbacks (no checkout needed); hard-exits the
    process when the Maven build produces no package artifact.
    """
    log_print("开始git代码...")
    if self.operation == "rollback":
        log_print("不需要拉取代码.")
        return True
    # FIX: identity comparison with None (was `== None`).
    if self.git_branch is None:
        self.git_branch = "master"
    sh.cd('%s/../work_space' % BASE_PATH)
    # Start from a clean checkout every time.
    if os.path.exists(self.deploy_name):
        sh.rm('-fr', self.deploy_name)
    sh.git('clone', '-b', self.git_branch, self.git_uri, self.deploy_name, _out=process_output)
    sh.cd(self.git_home_path)
    self._git_info()
    log_print("开始mvn编译部分...")
    sh.mvn('clean', 'package', '-U', '-Dmaven.test.skip=true', _out=process_output)
    if not os.path.exists('target/%s' % self.package_file):
        log_print("mvn编译失败.")
        os._exit(4)
    log_print("mvn编译成功")
    sh.mkdir('docker-work')
def gitrepo(root):
    """Collect head commit, branch and fetch-remote info for the repo at *root*."""
    previous_dir = sh.pwd().strip()
    sh.cd(root)
    head = sh.git('--no-pager', 'log', '-1', pretty="format:%s" % FORMAT).split('\n', 5)
    branch = os.environ.get('CIRCLE_BRANCH') or os.environ.get(
        'TRAVIS_BRANCH', sh.git('rev-parse', '--abbrev-ref', 'HEAD').strip())
    # Keep only the "(fetch)" lines of `git remote -v`.
    fetch_lines = [line for line in sh.git.remote('-v').strip().splitlines()
                   if line.endswith('(fetch)')]
    remotes = [line.split() for line in fetch_lines]
    sh.cd(previous_dir)
    return {
        "head": {
            "id": head[0],
            "author_name": head[1],
            "author_email": head[2],
            "committer_name": head[3],
            "committer_email": head[4],
            "message": head[5].strip(),
        },
        "branch": branch,
        "remotes": [{'name': r[0], 'url': r[1]} for r in remotes],
    }
def test_run(tmp_path):
    """`dotenv run` exposes keys from the .env file to the spawned command."""
    env_file = tmp_path / '.env'
    env_file.touch()
    sh.cd(str(tmp_path))
    dotenv.set_key(str(env_file), 'FOO', 'BAR')
    assert sh.dotenv('run', 'printenv', 'FOO').strip() == 'BAR'
def compile(self, source_dir, build_dir, install_dir):
    """CMake-configure and build this package, installing into *install_dir*."""
    src = os.path.join(source_dir, self.dirname)
    assert os.path.exists(src)
    bld = os.path.join(build_dir, self.dirname)
    sh.mkdir('-p', bld)
    sh.cd(bld)
    if platform.system() == 'Darwin':
        sh.cmake('-G', 'Xcode',
                 '-D', 'CMAKE_INSTALL_PREFIX=%s' % install_dir,
                 src, _out=sys.stdout)
        sh.xcodebuild('-scheme', 'install', '-configuration', 'Release',
                      _out=sys.stdout)
    else:
        sh.cmake('-D', 'CMAKE_INSTALL_PREFIX=%s' % install_dir,
                 src, _out=sys.stdout)
        sh.make('-j4', 'VERBOSE=1', _out=sys.stdout)
        sh.make.install(_out=sys.stdout)
def transfer(event_path_this_rank):
    # For each event directory, feed SAC a batch script that removes the
    # instrument response of every *.SAC file using its matching
    # SAC_PZs_* pole-zero file.
    root_path = str(sh.pwd())[:-1]  # drop the trailing newline from `pwd`
    for thedir in event_path_this_rank:
        sh.cd(thedir)
        stdin_list = []
        for sacfile in glob.glob("*.SAC"):
            net, sta, loc, chn = sacfile.split('.')[0:4]
            pz = glob.glob(f"SAC_PZs_{net}_{sta}_{chn}_{loc}_*_*")
            if (len(pz) != 1):
                # Zero or ambiguous pole-zero matches: log and skip this file.
                logger.error(
                    f"[rank:{rank},dir:{thedir}] error in transfering for {sacfile} in seeking {pz}"
                )
                continue
            # logger.info(
            #     f"[rank:{rank},dir:{thedir}] transfer {sacfile} with {pz}")
            stdin_list.append(f"r {sacfile}\n")
            stdin_list.append(f"rmean; rtr; taper \n")
            stdin_list.append(
                f"trans from pol s {pz[0]} to none freq 0.001 0.005 5 10\n")
            # presumably a meters -> nanometers scaling -- confirm units
            stdin_list.append(f"mul 1.0e9 \n")
            stdin_list.append("w over\n")
        # Quit once after all per-file commands, then run the batch.
        stdin_list.append(f"q\n")
        sh.sac(_in=stdin_list)
        sh.cd(root_path)
def transfer_directory_content(self, localDirectory: str, cyverseDirectory: str):
    """
    Transfers the content from the localDirectory to the cyverseDirectory
    - Needs internet connection.
    """
    try:
        # Change the working directory to the local directory FROM where data needs to be transferred
        sh.cd(localDirectory)
        # On icommands, change the working directory to the CyVerse directory TO where the data needs to be transferred
        sh.icd(cyverseDirectory)
        # Upload the data to CyVerse
        sh.iput("-r", "-f", ".")
        self.logger.write("Content of " + localDirectory + " is transfered to " + cyverseDirectory)
        # Change the local working directory to the original working directory
        sh.cd(self.workingDirectory)
        # Remove the local directory FROM where the data is already uploaded in earlier steps
        sh.rm("-r", localDirectory)
        self.logger.write("Removed local content from " + localDirectory)
        # Reset the directory structure
        os.makedirs(localDirectory)
    except Exception as e:
        # If something goes wrong, log the message
        self.logger.write(str(e))
        # FIX: the original nested one logger.write() inside another, logging
        # that call's None return value; log the failure message directly.
        self.logger.write("Data transfer unsuccessful from " + localDirectory + " to " + cyverseDirectory)
def setUp(self):
    """Fresh temp git repo per test; powerline and git.Color are mocked out."""
    self.powerline = mock.MagicMock()
    git.Color = mock.MagicMock()
    self.dirname = tempfile.mkdtemp()
    sh.cd(self.dirname)
    sh.git("init", ".")
def process(self, *, skip_ocr=False):
    """Scan, assemble and (optionally) OCR the pages, then move the PDF out."""
    # Prepare directories
    self.prepare_directories()
    cd(self.workdir)
    # Scan pages
    self.scan_pages()
    # Combine tiffs into single multi-page tiff
    self.combine_tiffs()
    # Convert tiff to pdf
    self.convert_tiff_to_pdf()
    # Run OCR unless the caller opted out; the OCR pass produces clean.pdf.
    if skip_ocr:
        filename = 'output.pdf'
    else:
        self.do_ocr()
        filename = 'clean.pdf'
    # Move file
    print(prefix() + 'Moving resulting file...')
    cd('..')
    mv('{}/{}'.format(self.workdir, filename), self.output_path)
    print('\nDone: %s' % self.output_path)
def maven(direct, *extra):
    """Run `mvn install` (plus any *extra* args) inside *direct*, then pop back up."""
    print("cd {}".format(direct))
    cd(direct)
    print("Maven install")
    mvn("install", *extra)
    print("Maven done")
    cd("..")
def get_new_files(input):
    """Return paths of untracked files ('?? ' entries in porcelain status)
    in the repo at *input*."""
    sh.cd(input)
    git = sh.git
    untracked = []
    for line in git.status('--porcelain'):
        if line.startswith('?? '):
            untracked.append(line[3:].rstrip('\n'))
    return untracked
def build_opencv():
    """Clone, configure and build OpenCV (stable_3.0.0 branches) with
    Python 3 bindings for the active pyenv interpreter."""
    sh.pip.install("numpy")
    # FIX: the branch argument was missing its closing quote (syntax error).
    clone_if_not_exists("opencv", "https://github.com/PolarNick239/opencv.git",
                        branch="stable_3.0.0")
    clone_if_not_exists("opencv_contrib", "https://github.com/PolarNick239/opencv_contrib.git",
                        branch="stable_3.0.0")
    sh.rm("-rf", "build")
    sh.mkdir("build")
    sh.cd("build")
    python_path = pathlib.Path(sh.pyenv.which("python").stdout.decode()).parent.parent
    version = "{}.{}".format(sys.version_info.major, sys.version_info.minor)
    sh.cmake(
        "..",
        "-DCMAKE_BUILD_TYPE=RELEASE",
        "-DCMAKE_INSTALL_PREFIX={}/usr/local".format(python_path),
        "-DWITH_CUDA=OFF",
        "-DWITH_FFMPEG=OFF",
        "-DINSTALL_C_EXAMPLES=OFF",
        "-DBUILD_opencv_legacy=OFF",
        "-DBUILD_NEW_PYTHON_SUPPORT=ON",
        "-DBUILD_opencv_python3=ON",
        # NOTE(review): this hard-coded path matches neither the
        # "opencv_contrib" checkout made above nor its 3.0.0 branch --
        # confirm which modules directory is actually intended.
        "-DOPENCV_EXTRA_MODULES_PATH=~/opencv_contrib-3.4.1/modules",
        "-DBUILD_EXAMPLES=ON",
        "-DPYTHON_EXECUTABLE={}/bin/python".format(python_path),
        "-DPYTHON3_LIBRARY={}/lib/libpython{}m.so".format(python_path, version),
        "-DPYTHON3_PACKAGES_PATH={}/lib/python{}/site-packages/".format(python_path, version),
        "-DPYTHON3_NUMPY_INCLUDE_DIRS={}/lib/python{}/site-packages/numpy/core/include".format(python_path, version),
        "-DPYTHON_INCLUDE_DIR={}/include/python{}m".format(python_path, version),
        _out=sys.stdout,
    )
    sh.make("-j4", _out=sys.stdout)
    sh.make.install(_out=sys.stdout)
def check_repositories(config):
    """Run build_dist_for() on each repo, record the private ones, and
    restore the original working directory."""
    saved_cwd = os.getcwd()
    config.checkout_dir.makedirs(0o755, exist_ok=True)
    for repository in config.repositories:
        if repository.private:
            config.pvt_pkgs.add(repository.name)
        repository.build_dist_for()
    cd(saved_cwd)
def __exit__(self, type, value, traceback):
    """Restore the saved working directory, then delete the temp work dir."""
    # cd out first -- the temp dir may still be the current directory.
    sh.cd(self.old_work_dir)
    logger.info("Back to %s", self.old_work_dir)
    shutil.rmtree(self.tmp_work_dir)
def checkout(self):
    # Ensure this repo is checked out under the parent's checkout dir
    # (cloning on first use), cd into it, and return its path object.
    repo_co_dir=self.parent.checkout_dir[self.name]
    if not repo_co_dir.exists():
        # NOTE(review): unary `+` on these path objects appears to coerce
        # them for cd() -- confirm against the path type's __pos__.
        cd(+self.parent.checkout_dir)
        call([self.vcs, "clone", self.url, self.name])
    cd(+repo_co_dir)
    return repo_co_dir
def build_opencv():
    """Clone, configure and build OpenCV (stable_3.0.0 branches) with
    Python 3 bindings for the active pyenv interpreter."""
    sh.pip.install("numpy")
    # FIX: the branch argument was missing its closing quote (syntax error).
    clone_if_not_exists("opencv", "https://github.com/PolarNick239/opencv.git",
                        branch="stable_3.0.0")
    clone_if_not_exists("opencv_contrib", "https://github.com/PolarNick239/opencv_contrib.git",
                        branch="stable_3.0.0")
    sh.rm("-rf", "build")
    sh.mkdir("build")
    sh.cd("build")
    python_path = pathlib.Path(sh.pyenv.which("python").stdout.decode()).parent.parent
    version = "{}.{}".format(sys.version_info.major, sys.version_info.minor)
    sh.cmake(
        "..",
        "-DCMAKE_BUILD_TYPE=RELEASE",
        "-DCMAKE_INSTALL_PREFIX={}/usr/local".format(python_path),
        "-DWITH_CUDA=OFF",
        "-DWITH_FFMPEG=OFF",
        "-DINSTALL_C_EXAMPLES=OFF",
        "-DBUILD_opencv_legacy=OFF",
        "-DBUILD_NEW_PYTHON_SUPPORT=ON",
        "-DBUILD_opencv_python3=ON",
        # NOTE(review): this hard-coded path matches neither the
        # "opencv_contrib" checkout made above nor its 3.0.0 branch --
        # confirm which modules directory is actually intended.
        "-DOPENCV_EXTRA_MODULES_PATH=~/opencv_contrib-4.3.2/modules",
        "-DBUILD_EXAMPLES=ON",
        "-DPYTHON_EXECUTABLE={}/bin/python".format(python_path),
        "-DPYTHON3_LIBRARY={}/lib/libpython{}m.so".format(python_path, version),
        "-DPYTHON3_PACKAGES_PATH={}/lib/python{}/site-packages/".format(python_path, version),
        "-DPYTHON3_NUMPY_INCLUDE_DIRS={}/lib/python{}/site-packages/numpy/core/include".format(python_path, version),
        "-DPYTHON_INCLUDE_DIR={}/include/python{}m".format(python_path, version),
        _out=sys.stdout,
    )
    sh.make("-j4", _out=sys.stdout)
    sh.make.install(_out=sys.stdout)
def test_pull_tags(git_dir, hg_repo):
    """An hg tag created upstream must appear in git after a pull."""
    git_repo = clone_repo(git_dir, hg_repo)
    sh.cd(hg_repo)
    sh.hg.tag("tag1")
    sh.cd(git_repo)
    sh.git.pull()
    assert "tag1" in sh.git.tag()
def test_clone_divergent_bookmarks(hg_repo, git_dir):
    """Divergent hg bookmarks must become separate remote git branches."""
    sh.cd(hg_repo)
    sh.hg.bookmark("bookmark_one")
    make_hg_commit("b")
    sh.hg.update(rev=0)
    make_hg_commit("c")
    sh.hg.bookmark("bookmark_two")
    make_hg_commit("d")
    clone_repo(git_dir, hg_repo)
    result = sh.git.branch(remote=True)
    # FIX: was a Python 2 print statement (`print result.stdout`).
    print(result.stdout)
    # Expected `git branch -r` listing (two-space indent per line); the exact
    # whitespace was mangled in the source -- confirm against a real run.
    assert result.stdout == """  origin/HEAD -> origin/master
  origin/bookmark_one
  origin/bookmark_two
  origin/master
"""
    sh.git.checkout("origin/bookmark_one")
    assert_git_count(2)
    assert_git_messages(['b', 'a'])
    sh.git.checkout("origin/bookmark_two")
    assert_git_count(3)
    assert_git_messages(['d', 'c', 'a'])
def test_pull_tag_with_spaces(git_dir, hg_repo):
    """Spaces in hg tag names get mangled to underscores on the git side."""
    git_repo = clone_repo(git_dir, hg_repo)
    sh.cd(hg_repo)
    sh.hg.tag("tag one")
    sh.cd(git_repo)
    sh.git.pull()
    assert "tag___one" in sh.git.tag()
def test_python_script():
    # End-to-end exercise of subtitle_downloader.py: per-file invocations
    # with flags, a directory run, then a no-argument run from inside the
    # video directory, which must find the same files as the explicit run.
    print("test_python_script")
    executable = os.path.realpath("subtitle-downloader/subtitle_downloader.py")
    # should try all languages and print whether each download was successful
    # -a option should override language specifications, even invalid ones
    for file in VIDEO_FILES:
        sh.python(executable, "--verbose", "-a", "--language", "Engrish", file)
    sh.python(executable, VIDEO_DIRECTORY)  # test video directory
    # gather a list of subtitle files created before removing them
    sub_files = glob.glob(VIDEO_DIRECTORY + "*.srt")
    sh.Command("rm")("-rf", sub_files)
    # change directories to the path containing the video files
    current_path = os.path.realpath(__file__)
    sh.cd(os.path.abspath(VIDEO_DIRECTORY))
    # run vanilla command and ensure that the result is the same as before
    sh.python(executable)
    sub_files2 = glob.glob(VIDEO_DIRECTORY + "*.srt")
    try:
        assert sub_files == sub_files2
    except AssertionError:
        # Soft failure: report rather than abort the remaining checks.
        print("Error: given no arguments, script should search the current directory")
    sh.cd(os.path.dirname(current_path))
def make_release(flavors, custom_channel, custom_app):
    """Clean-build release APKs for every flavor with the given channel and app id."""
    sh.cd(PATH_PREFIX)
    # equivalent to: ./gradlew -q clean assemble<Flavor>Release ...
    gradle_args = [GRADLEW, '-q', 'clean',
                   '-PcustomChannel=' + custom_channel,
                   '-PcustomApplicationId=' + custom_app]
    gradle_args += ['assemble{0}Release'.format(flavor.title()) for flavor in flavors]
    subprocess.call(gradle_args)
def ensure_repo_consistency(self):
    """Make sure the repo exists, is configured, and has all current files committed."""
    # Use sh for git commands
    import sh
    # Always work in the same directory as the repository is in
    sh.cd(self.repo_location)
    # (Re-)init the repository
    sh.git.init(self.repo_location)
    # Set user info
    current_user = get_current_user()
    sh.git.config('user.name', current_user)
    sh.git.config('user.email', '{}@{}'.format(current_user, socket.getfqdn()))
    # Default branch is called 'main'
    sh.git.checkout('-B', 'main')
    # Add all files
    sh.git.add('-A', self.repo_location)
    # Commit only when something actually changed (local typo `has_channges` renamed).
    has_changes = 'nothing to commit' not in sh.git.status()
    if has_changes:
        sh.git.commit('-m', 'Committing latest changes')
def generate_template():
    # Regenerate the gettext template (.pot) via cmake+make in build_dir and
    # move it into po_dir for translators.  Uses module globals build_dir,
    # po_dir, cleanup, cd, cmake, make, mv, mkdir, isdir, isfile.
    template_file = ""
    if not isdir(build_dir):
        mkdir(build_dir)
    if isdir(build_dir):
        template_file = build_dir + "/dekko.dekkoproject.pot"
    print("TemplateFile: " + template_file)
    cd(build_dir)
    print("Running cmake to generate updated template")
    cmake('..')
    print("Running make")
    make("-j2")
    if isfile(template_file):
        if isdir(po_dir):
            print("Moving template to po dir: " + po_dir)
            mv(template_file, po_dir)
        else:
            # No po/ directory: bail out after cleanup.
            print("Couldn't find po dir: " + po_dir)
            cleanup()
            return
    else:
        # The build produced no template: bail out after cleanup.
        cleanup()
        print("No template found for: " + template_file)
        return
    print("Cleaning up")
    cleanup()
    print("YeeHaa!")
    print("All done, you need to commit & push this to bitbucket now :-)")
    print("NOTE: this would also be a good time to sync with launchpad, run")
    print(" $ python3 launchpad_sync.py")
def make_release(flavors, custom_channel):
    """Clean-build release APKs for every requested flavor with the given channel."""
    sh.cd(PATH_PREFIX)
    # equivalent to: ./gradlew -q clean assemble<Flavor>Release ...
    cmd = [GRADLEW, '-q', 'clean', '-PcustomChannel=' + custom_channel]
    cmd += ['assemble{0}Release'.format(flavor.title()) for flavor in flavors]
    subprocess.call(cmd)
def create(version_number): heading1("Creating new version based on Fedora " + version_number + "\n") # Update git and create new version. heading2("Updating master branch.") print(git.checkout("master")) print(git.pull()) # Bring branch current. heading2("Creating new branch") print(git.checkout("-b" + version_number)) # Create working branch. # Get kickstart files. heading2("Creating fedora-kickstarts directory\n") mkdir("-p", (base_dir + "/fedora-kickstarts/")) cd(base_dir + "/fedora-kickstarts/") heading2("Downloading Fedora kickstart files.") ks_base = "https://pagure.io/fedora-kickstarts/raw/f" \ + version_number + "/f" for file in ks_files: file_path = ks_base + "/fedora-" + file print ("Downloading " + file_path) curl("-O", file_path)
def RunYcmdTests(build_dir):
    """Execute the compiled ycm_core_tests binary from its build directory."""
    tests_dir = p.join(build_dir, 'ycm/tests')
    sh.cd(tests_dir)
    env = os.environ.copy()
    env['LD_LIBRARY_PATH'] = DIR_OF_THIS_SCRIPT
    sh.Command(p.join(tests_dir, 'ycm_core_tests'))(_env=env, _out=sys.stdout)
def main():
    # Bootstrap nvm, install node v9.6.1 and the project's npm dependencies.
    sh.git.clone('https://github.com/creationix/nvm.git')
    # NOTE(review): `source` is a shell builtin (not an executable) and nvm
    # is a shell function, so these sh.* calls likely fail outside an
    # interactive shell -- confirm this script actually runs as written.
    sh.source('~/nvm/nvm.sh')
    sh.nvm.install('v9.6.1')
    sh.nvm.alias('default', 'v9.6.1')
    sh.cd('rumble-react')
    sh.npm('install')
def BuildOmniSharp():
    """Build the bundled OmniSharpServer with whichever MSBuild flavour exists."""
    builder = PathToFirstExistingExecutable(['msbuild', 'msbuild.exe', 'xbuild'])
    if not builder:
        sys.exit('msbuild or xbuild is required to build Omnisharp')
    sh.cd(p.join(DIR_OF_THIS_SCRIPT, 'third_party/OmniSharpServer'))
    sh.Command(builder)(_out=sys.stdout)