def file_backup_restore(process, action):
    """Back up or restore the /opt/anaconda storage tree as a tarball.

    :param process: object providing ``storage_backup_name`` and
        ``backup_directory`` attributes
    :param action: ``'backup'`` archives ``storage/`` (minus pgdata and the
        repository mirror) and moves the tarball to the backup directory;
        ``'restore'`` copies the tarball back and unpacks it in place.
        Any other value is a silent no-op.
    """
    if action == 'backup':
        with sh.pushd('/opt/anaconda'):
            # pgdata and the repository mirror are handled elsewhere,
            # so they are excluded from this archive.
            sh.tar(
                "--exclude=storage/pgdata",
                "--exclude=storage/object/anaconda-repository",
                "-czvf",
                process.storage_backup_name,
                "storage"
            )
            sh.mv(
                f'/opt/anaconda/{process.storage_backup_name}',
                f'{process.backup_directory}/'
            )
    elif action == 'restore':
        sh.cp(
            f'{process.backup_directory}/{process.storage_backup_name}',
            '/opt/anaconda'
        )
        with sh.pushd('/opt/anaconda'):
            sh.tar(
                '-xzvf',
                f'/opt/anaconda/{process.storage_backup_name}'
            )
            # remove the working copy of the tarball after extraction
            sh.rm(f'{process.storage_backup_name}')
def update_repo(data):
    """Mirror the repository described by a webhook payload.

    Clones (bare) into the computed repo path on first sight, otherwise
    fetches the pushed ref from origin.
    """
    repo = data['repository']
    path = get_repo_path(repo['owner']['name'], repo['name'])
    if path.exists():
        with pushd(path):
            git.fetch('origin', data['ref'])
    else:
        path.mkdir(parents=True)
        with pushd(path):
            git.clone(repo['clone_url'], '.', bare=True)
def test_with_tilde_prefixed_dirs(self, tmpdir, mocker): yaml = dedent(""" # some_comment ~/in_my_home_dir/ """) # pass an empty yaml # move into tmpdir # ensure the workflow exits 0 and prints nothing yaml_path = "empty.yaml" with sh.pushd(tmpdir): with open(yaml_path, "w") as f: print(yaml, file=f) for d in [ "existing_dir", "an_existing_dir", "a_dir/with_a_subdir" ]: os.makedirs(d, exist_ok=True) assert os.path.isdir(d) with pytest.raises(Exception) as e: mocker.patch("builtins.print") run_workflow(yaml_path) print.assert_called_with( "Don't use tilde `~` in dirnames in `validate_dir_existence.py`" )
def test_error_if_target_is_missing(self, tmpdir):
    """Linking to a target that does not exist must raise FileNotFoundError."""
    # Work inside a scratch dir; the link target is deliberately never created.
    with sh.pushd(tmpdir), pytest.raises(FileNotFoundError):
        add_relative_symlink("doesnt_exist.txt", "some_link")
def get_lines(self, repo_path):
    """Run ``ugly_one_liner`` inside *repo_path* and return its '>>>' lines."""
    with sh.pushd(repo_path):
        result = sh.python3('-c', ugly_one_liner)
        decoded = result.stdout.decode('utf-8')
        return [line for line in decoded.split('\n') if '>>>' in line]
def commit_file_and_get_hash(repo_path, file_name):
    """Create *file_name* in the repo, commit it, and return the new HEAD hash."""
    with sh.pushd(repo_path):
        sh.touch(file_name)
        sh.git("add", file_name)
        # Commit message matches the original, single quotes included.
        sh.git("commit", "-m", f"'adding {file_name}'")
        return str(sh.git("rev-parse", "HEAD")).strip()
def cleanup_and_restore_files(process):
    """Snapshot current storage, wipe stale state, and restore from backup.

    :param process: object providing ``postgres_system_backup`` plus the
        attributes consumed by ``file_backup_restore``
    """
    # Timezone-aware UTC replaces the deprecated datetime.utcnow();
    # the rendered timestamp is identical.
    timestamp = datetime.datetime.now(datetime.timezone.utc).strftime('%Y-%m-%dT%H%M%S')
    # Compress and timestamp the existing files minus the repos
    with sh.pushd('/opt/anaconda'):
        sh.tar(
            "--exclude=storage/object/anaconda-repository",
            "-czvf",
            f"git_pgdata.snapshot_{timestamp}.tar.gz",
            "storage"
        )
    # Cleanup directories as things will get restored
    sh.rm('-Rf', '/opt/anaconda/storage/git')
    sh.rm('-Rf', '/opt/anaconda/storage/pgdata')
    sh.rm('-Rf', '/opt/anaconda/storage/object/anaconda-objects')
    sh.rm('-Rf', '/opt/anaconda/storage/object/anaconda-projects')
    # Restore the files
    file_backup_restore(process, 'restore')
    # Recreate the postgres directory and set permissions
    # (999 is the postgres uid inside the container -- TODO confirm)
    sh.mkdir(process.postgres_system_backup)
    sh.chown('999:root', f'{process.postgres_system_backup}')
    sh.chmod('700', f'{process.postgres_system_backup}')
def git_push_to_master(cwd, hosts, name="all", force=False):
    """Push the master branch of *cwd* to one or more deploy hosts.

    Creates a temporary remote ``magic_deploy_<name>``, pushes to it
    (optionally with --force), then removes the remote again.

    :param cwd: path of the git working tree to push from
    :param hosts: list of remote URLs; hosts[0] is the fetch URL
    :param name: suffix for the temporary remote name
    :param force: when True, pass --force to git push
    :raises Exception: if the working tree has uncommitted changes
    """
    def process_output(line):
        # stream push progress to stdout
        print(line)
    with sh.pushd(cwd):
        name = "magic_deploy_%s" % name
        # Refuse to deploy uncommitted work.
        if sh.git("status", "--porcelain").strip():
            raise Exception("Repository is UNCLEAN. Commit your changes")
        remote_list = sh.git("remote").strip().split()
        # Drop any stale remote of the same name before re-adding it.
        if name in remote_list:
            sh.git("remote", "remove", name)
        sh.git("remote", "add", name, hosts[0])
        if len(hosts) > 1:
            # NOTE(review): this re-adds hosts[0] as a push URL as well --
            # presumably harmless, but confirm it is intended.
            for h in hosts:
                sh.git("remote", "set-url", name, "--push", "--add", h)
        _o = ["push", name, "master"]
        if force:
            _o.append("--force")
        sh.git(*_o, _out=process_output)
        sh.git("remote", "remove", name)
def test_comment_only_file_has_empty_md5sum(self, tmpdir):
    """A file containing only comment lines hashes like an empty file."""
    filename = "comment_file"
    with sh.pushd(tmpdir):
        with open(filename, mode="w") as handle:
            # identical bytes to print("# comment line", file=f)
            handle.write("# comment line\n")
        assert get_md5sum(filename, comment="#") == empty_md5()
def build_seed(self):
    """Assemble the seed tree, tar it into seed.tar.gz, then remove the tree."""
    # Start from a clean copy of the deploy template plus machine binaries.
    shutil.rmtree(self.tmp("seed"), ignore_errors=True)
    shutil.copytree(DIR + "/deploy/seed", self.tmp("seed"))
    shutil.copytree(DIR + "/deploy/machine/bin", self.tmp("seed/bin"))
    # writes the generated config into the seed tree (defined elsewhere)
    self.build_seed_config()
    with sh.pushd(self.tmp("")):
        tar_zcf(["seed.tar.gz", "seed"])
    # the tarball is the artifact; the expanded tree is no longer needed
    shutil.rmtree(self.tmp("seed"))
def test_clone_and_checkout(self, tmpdir):
    """Cloning at a given commit must exclude files from later commits."""
    with sh.pushd(tmpdir):
        repo_name = "my_repo"
        sh.git("init", repo_name)
        # file1 lands in commit 1, file2 in commit 2; we pin to commit 1.
        commit_hash_1 = commit_file_and_get_hash(repo_name, "file1")
        _ = commit_file_and_get_hash(repo_name, "file2")
        copied_repo_name = "my_copy"
        copied_repo = ExternalRepository(repo_name, commit_hash_1, copied_repo_name)
        copied_repo.clone()
        copied_repo.checkout()
        with sh.pushd(copied_repo_name):
            # at commit 1 only file1 should exist
            assert os.path.isfile("file1")
            assert not os.path.isfile("file2")
def add_remote(repo, name, url):
    """Add a remote to the Git repository.

    If the remote already exists, repoint it at *url* instead, then fetch.

    :param repo: path to the git working tree
    :param name: remote name
    :param url: remote URL
    """
    with sh.pushd(repo):
        try:
            git.remote("add", name, url)
        except sh.ErrorReturnCode_3:
            # exit code 3 -- presumably "remote already exists"; verify
            # against the git-remote man page for the git version in use
            git.remote("set-url", name, url)
        git.fetch(name)
def run_prep(self, ensure_autosetup: bool = True):
    """
    run `rpmbuild -bp` in the dist-git repo to get a git-repo in the
    %prep phase so we can pick the commits in the source-git repo

    @param ensure_autosetup: replace %setup with %autosetup if possible
    """
    rpmbuild = sh.Command("rpmbuild")
    with sh.pushd(self.dist_git_path):
        BUILD_dir = Path("BUILD")
        if BUILD_dir.is_dir():
            # remove BUILD/ dir if it exists
            # for single-commit repos, this is problem in case of a rebase
            # there would be 2 directories which the get_build_dir() function
            # would not handle
            shutil.rmtree(BUILD_dir)
        cwd = Path.cwd()
        logger.debug(f"Running rpmbuild in {cwd}")
        # spec file is conventionally named after the dist-git directory
        specfile_path = Path(f"SPECS/{cwd.name}.spec")
        rpmbuild_args = [
            "--nodeps",
            "--define",
            f"_topdir {cwd}",
            "-bp",
        ]
        if self.log_level:
            # -vv can be super-duper verbose
            rpmbuild_args.append("-" + "v" * self.log_level)
        rpmbuild_args.append(str(specfile_path))
        if ensure_autosetup:
            self._enforce_autosetup()
        try:
            running_cmd = rpmbuild(*rpmbuild_args)
        except sh.ErrorReturnCode as e:
            # This might create a tons of error logs.
            # Create a child logger, so that it's possible to filter
            # for them, for example in Sentry.
            rpmbuild_logger = logger.getChild("rpmbuild")
            for line in e.stderr.splitlines():
                rpmbuild_logger.error(str(line))
            # Also log the failure using the main logger.
            logger.error(f"{['rpmbuild', *rpmbuild_args]} failed")
            raise
        # revert any %setup -> %autosetup edit made by _enforce_autosetup
        # (presumably -- confirm against _enforce_autosetup)
        self.dist_git.repo.git.checkout(self.relative_specfile_path)
        logger.debug(f"rpmbuild stdout = {running_cmd}")  # this will print stdout
        logger.info(f"rpmbuild stderr = {running_cmd.stderr.decode()}")
        hook_cmd = get_hook(self.package_name, AFTER_PREP_HOOK)
        if hook_cmd:
            bash = sh.Command("bash")
            bash("-c", hook_cmd)
def test_md5sum_for_utf8_binary_file(self, tmpdir):
    """md5 of a small binary file matches the command-line md5sum value."""
    filename = "binary_file"
    with sh.pushd(tmpdir):
        with open(filename, "wb") as handle:
            # same six bytes the original wrote in two calls
            handle.write(b"\x0a\x1b\x2c\x3d\x4e\x5f")
        # computed using md5sum at command line:
        # - might be better to compare the python result with
        #   subprocess.run(["md5sum", f_binary])
        assert get_md5sum(filename) == "76aa4e16e87c63faa6cb03fc5f7f17f9"
def run_playbook(*args, **kwargs):
    """Run the propelc Ansible playbook from CWD, streaming output via sh_verbose.

    Extra positional/keyword args are forwarded to ansible-playbook.
    """
    playbook = "_propelc/playbook.yml"
    with sh.pushd(CWD):
        sh.ansible_playbook(playbook, "-i", "_propelc/hosts",
                            _out=sh_verbose, *args, **kwargs)
def fetch_external_doc(repository, destination):
    """Ensure *destination* holds an up-to-date shallow clone of *repository*.

    First call per repository clones; later calls reuse the existing checkout
    and `git pull` at most once per process (tracked in ``has_pulled``).

    :param repository: git URL to clone/pull from
    :param destination: local directory for the checkout
    """
    # BUG FIX: the original ran `rm -rf destination` before testing for
    # '.git', so the pull/cache branch was unreachable and every call
    # re-cloned from scratch. Only wipe when there is no usable checkout.
    if osp.exists(osp.join(destination, '.git')):
        with sh.pushd(destination):
            if not has_pulled.get(repository):
                sh.git('pull')
                has_pulled[repository] = True
    else:
        sh.rm('-rf', destination)
        sh.mkdir('-p', destination)
        with sh.pushd(destination):
            sh.git('clone', repository, '--depth', '1', '.')
def send_emails(loc, subject, cc, smtp):
    """Send one email per file found in directory *loc* over SMTPS.

    :param loc: directory whose entries are message-body files
    :param subject: subject line for every message
    :param cc: cc recipients passed through to send_email
    :param smtp: dict with 'host', 'user', 'pass' keys
    """
    # NOTE(review): the connection is stored in a module-level global,
    # presumably so send_email() can reuse it -- confirm against its
    # definition.
    global sm
    sm = smtplib.SMTP_SSL(host=smtp['host'])
    sm.login(smtp['user'], smtp['pass'])
    with sh.pushd(loc):
        for uniq in os.listdir():
            # file name appears to identify the recipient; body is the content
            with open(uniq) as f:
                send_email(uniq, f.read(), subject, cc)
def test_md5sum_for_latin1_binary_file(self, tmpdir):
    """md5 of a latin-1-encoded file matches the command-line md5sum value."""
    filename = "latin1_file"
    with sh.pushd(tmpdir):
        with open(filename, "wb") as handle:
            # same bytes the original wrote in two calls ('\xe9' is e-acute)
            handle.write("abcd\xe91\x3d\x4e\x5f".encode("latin-1"))
        # computed using md5sum at command line
        assert get_md5sum(filename) == "ddb57847096b82ccaf7d84a66c03584b"
def xcompile_openssl(build_context, arch, output, path):
    """Cross-compile OpenSSL in *path* for an Android *arch*, installing to *output*."""
    # XXX: only android
    assert isinstance(build_context, AndroidBuildContext)
    target = 'android-%s' % arch
    env = build_context.get_build_env(arch, output)
    with sh.pushd(path):
        # OpenSSL uses its own Configure script (capital C) with a target name
        sh.Command('./Configure')(target, '--prefix=%s' % output, _fg=True, _env=env)
        sh.make(_fg=True, _env=env)
        # install_sw skips docs/man pages
        sh.make('install_sw', 'VERBOSE=1', _fg=True, _env=env)
def test_subdir_is_made_when_subdir_doesnt_exist(self, tmpdir):
    """add_relative_symlink creates missing parent dirs for the link location."""
    with sh.pushd(tmpdir):
        sh.touch("a.txt")
        link_path = "subdir/b.txt"
        add_relative_symlink("a.txt", link_path)
        # the link target must be relative to the link's own directory
        expected_target = "../a.txt"
        assert os.path.islink(link_path)
        assert os.readlink(link_path) == expected_target
def set_git_remotes(name, hosts):
    """(Re)create a remote named via git_remote_name pointing at *hosts*.

    :param name: logical remote name, transformed by git_remote_name
    :param hosts: list of URLs; hosts[0] becomes the fetch URL
    """
    with sh.pushd(CWD):
        name = git_remote_name(name)
        remote_list = sh.git("remote").strip().split()
        # drop any stale remote of the same name before re-adding it
        if name in remote_list:
            sh.git("remote", "remove", name)
        sh.git("remote", "add", name, hosts[0])
        if len(hosts) > 1:
            # NOTE(review): this also re-adds hosts[0] as a push URL --
            # confirm that is intended
            for h in hosts:
                sh.git("remote", "set-url", name, "--push", "--add", h)
def xcompile_autotools(build_context, arch, output, path):
    """Cross-compile an autotools project in *path* for *arch*, installing to *output*."""
    host = build_context.get_host(arch)
    env = build_context.get_build_env(arch, output)
    with sh.pushd(path):
        sh.Command('./configure')(
            '--host=%s' % host,
            '--prefix', output,
            _fg=True,
            _env=env,
        )
        sh.make(_fg=True, _env=env)
        sh.make('install', _fg=True, _env=env)
def test_error_if_linkloc_exists_but_is_not_a_link(self, tmpdir):
    """Linking over an existing regular file must raise FileExistsError."""
    with sh.pushd(tmpdir):
        # both the target and the (conflicting) link location exist as files
        for existing in ("a.txt", "b.txt"):
            sh.touch(existing)
        with pytest.raises(FileExistsError):
            add_relative_symlink("a.txt", "b.txt")
def get_tf_version(tf_module_path):
    """Return the Terraform version string (e.g. 'v1.2.3') detected in *tf_module_path*.

    Runs `terraform version` inside the module directory and parses the
    first line of its output. Returns an empty string when nothing matches.
    """
    tf_version = ""
    # Raw string: '\s' and '\w' in a plain literal are invalid escape
    # sequences (SyntaxWarning since Python 3.12, error in the future).
    tf_version_re = r'^Terraform\s.*?(?=\w+)(v.*[0-9.]+)'
    with sh.pushd(tf_module_path):
        m = re.findall(tf_version_re, str(terraform.version()))
        if m:
            tf_version = m[0]
            log.info('detected terraform {}'.format(tf_version))
        return tf_version
def test_make_fresh_link(self, tmpdir):
    """add_relative_symlink creates a new symlink where none existed before."""
    target_name = "abc.txt"
    link_name = "abc.link"
    with sh.pushd(tmpdir):
        sh.touch(target_name)
        add_relative_symlink(target_name, link_name)
        # same-directory link: readlink yields the bare target name
        assert os.path.islink(link_name)
        assert os.readlink(link_name) == target_name
def setup_repo_data(driver):
    """Use gips_inventory to ensure assets are present."""
    # Skip when setup was already tried or disabled via --setup_repo.
    if (driver in setup_attempted
            or not pytest._config_saved_by_gips.getoption('setup_repo')):
        return
    as_path = pytest._config_saved_by_gips.getini('artifact-store-path')
    das_path = os.path.join(as_path, driver)  # technically should be der_path
    '''ftp method, not currently in use
    username, password, host, path = [pytest.config.getini(ini) for ini in (
        'artifact-store-user', 'artifact-store-password',
        'artifact-store-host', 'artifact-store-path')]
    url_template = 'ftp://{}:{}@{}/{}/{}/'
    url_head = url_template.format(username, password, host, path, driver)
    sanitized_uh = url_template.format(
        username, '<password>', host, path, driver)
    # first check to see if assets exist already -- if they're in the gips
    # data store, they're probably already cataloged by the ORM
    ftps = ftplib.FTP_TLS(host, username, password)
    ftps.prot_p()
    ftps.cwd(path + '/' + driver)
    remote_files = ftps.nlst()
    '''
    remote_files = os.listdir(das_path)
    local_files = [
        os.path.basename(fp) for fp in glob.glob(
            os.path.join(pytest._config_saved_by_gips.getini('data-repo'),
                         driver, 'tiles', '*/*/*'))
    ]
    # nothing to do when every artifact already exists locally
    if set(remote_files).issubset(set(local_files)):
        print(driver, 'asset files already present; no setup needed')
        return
    print('Installing', driver, 'assets from', das_path)
    # NOTE(review): if mkdtemp() itself raised, the finally clause would hit a
    # NameError on temp_dir; consider moving the call above the try.
    try:
        temp_dir = tempfile.mkdtemp()
        with sh.pushd(temp_dir):
            #sh.wget('--recursive', '--no-directories', url_head)
            for fn in remote_files:
                shutil.copy(os.path.join(das_path, fn), temp_dir)
            sh.gips_archive(driver, '-v99', _err='/dev/stderr', _out='/dev/stdout')
    finally:
        shutil.rmtree(temp_dir)
    print(driver, "data installation complete.")
def extract_pair_package(self, file, extraction_dir='.'):
    """Unpack an apertium .deb next to it and rewrite mode paths.

    :param file: path to the .deb package
    :param extraction_dir: where (relative to the .deb's directory) to
        place the extracted apertium data
    :return: the working directory (dirname of *file*)
    """
    workdir = dirname(file)
    with pushd(workdir):
        # Extract the file from the .deb
        dpkg_deb('-x', file, extraction_dir)
        # Copy the files we need
        cp('-rlf', glob('usr/share/apertium/*'), extraction_dir)
        # Remove everything else
        rm('-Rf', 'usr')
        # Rewrite paths in modes files
        for mode in glob('modes/*.mode'):
            self.replace_in_file(mode, '/usr/share/apertium', workdir)
    return workdir
def test_md5sum_for_existing_files(self, tmpdir):
    """Empty files hash to the canonical empty md5; non-empty files do not."""
    empty_name = "empty_file"
    full_name = "non_empty_file"
    with sh.pushd(tmpdir):
        sh.touch(empty_name)
        with open(full_name, "w") as handle:
            # identical bytes to print("some-data", file=f)
            handle.write("some-data\n")
        assert isinstance(get_md5sum(empty_name), str)
        assert get_md5sum(empty_name) == empty_md5()
        assert get_md5sum(full_name) != empty_md5()
def grade_q2b(uniqname):
    """Grade question 2b: clone the student's conflict repo and verify the
    post-merge file listing. Returns (grade, html_text)."""
    try:
        repo, path = clone('q2b', uniqname, 'csprag-git-conflict2')
    except sh.ErrorReturnCode as e:
        # clone failed -- report the command output and award zero
        text = '''
<p><strong>Error! Failed to clone {}</strong></p>
<p>Ran command: <tt>{}</tt></p>
<p>stdout:</p>
<pre>
{}
</pre></p>
<p>stderr:</p>
<pre>
{}
</pre>
'''.format('csprag-git-conflict2', e.full_cmd, e.stdout.decode('utf8'), e.stderr.decode('utf8'))
        return 0, text
    with sh.pushd(path):
        # expected directory listing after a correct merge
        golden = '''\
README.md
main.py
sales-2016-03-01
sales-2016-03-02
sales-2016-03-03
'''
        out = sh.ls('-1')
        text = ''
        grade = 0
        text += '<dd><strong>Merge path conflict [base 1.0]</strong></dd>\n'
        grade += 1.0
        if out == golden:
            text += '<dd>All correct!</dd>'
        else:
            text += '<dd>Incorrect files in directory after merge [-1.0]. Expected <pre>{}</pre>, but got <pre>{}</pre></dd>'.format(
                golden, out)
            grade -= 1.0
    # NOTE(review): q2b_entry is built but never used; perhaps it was meant
    # to be returned -- confirm with callers.
    q2b_entry = '''
<dt>Question 2b</dt>
<dd>{:1.1f}/1.0</dd>
<dl>
{}</dl>
'''.format(grade, text)
    return grade, text
def test_dont_throw_error_if_link_points_to_target(self, tmpdir, mocker):
    # - make a temp dir
    # - add a file
    # - make a soft link to that file from a link-name
    # - use make_soft_link to make a link to that file from the same link-name
    # - check that the second call did not call os.symlink
    # NOTE(review): despite the comment above, os.symlink is never patched and
    # the `mocker` fixture is unused -- the test only verifies the second call
    # is a no-op that does not raise.
    target = "abc.txt"
    link = "abc.link"
    with sh.pushd(tmpdir):
        sh.touch(target)
        add_relative_symlink(target, link)
        # second, identical call must not raise
        add_relative_symlink(target, link)
        assert os.path.islink(link)
        assert os.readlink(link) == target
def test_nothing_printed_when_all_tests_pass(self, tmpdir, mocker):
    """run_workflow stays silent when every md5 check passes."""
    # config maps a test name to an input file and its expected md5
    yaml = dedent("""
    test1:
        input_file: empty_file
        expected_md5sum: {}
    """).format(empty_md5())
    with sh.pushd(tmpdir):
        sh.touch("empty_file")
        with open("config.yaml", "w") as f:
            print(yaml, file=f)
        # patch print so any output would be observable
        mocker.patch("builtins.print")
        run_workflow("config.yaml")
        print.assert_not_called()
def grade_q2b(uniqname):
    """Grade question 2b (c4cs variant): clone the student's conflict repo and
    verify the post-merge file listing. Returns (grade, html_text)."""
    try:
        repo, path = clone('q2b', uniqname, 'c4cs-w16-conflict2')
    except sh.ErrorReturnCode as e:
        # clone failed -- report the command output and award zero
        text = '''
<p><strong>Error! Failed to clone {}</strong></p>
<p>Ran command: <tt>{}</tt></p>
<p>stdout:</p>
<pre>
{}
</pre></p>
<p>stderr:</p>
<pre>
{}
</pre>
'''.format('c4cs-w16-conflict2', e.full_cmd, e.stdout.decode('utf8'), e.stderr.decode('utf8'))
        return 0, text
    with sh.pushd(path):
        # expected directory listing after a correct merge
        golden = '''\
README.md
main.py
sales-2016-03-01
sales-2016-03-02
sales-2016-03-03
'''
        out = sh.ls('-1')
        text = ''
        grade = 0
        text += '<dd><strong>Merge path conflict [base 0.5]</strong></dd>\n'
        grade += 0.5
        if out == golden:
            text += '<dd>All correct!</dd>'
        else:
            text += '<dd>Incorrect files in directory after merge [-0.5]. Expected <pre>{}</pre>, but got <pre>{}</pre></dd>'.format(golden, out)
            grade -= 0.5
    # NOTE(review): q2b_entry is built but never used; perhaps it was meant
    # to be returned -- confirm with callers.
    q2b_entry = '''
<dt>Question 2b</dt>
<dd>{:1.1f}/0.5</dd>
<dl>
{}</dl>
'''.format(grade, text)
    return grade, text
def git_push_to_master(cwd, hosts, name="all"):
    """
    Push the master branch of *cwd* to the deploy hosts via a temporary
    remote named ``juicy_deploy_<name>``, then remove that remote.

    :param cwd: path of the git working tree to push from
    :param hosts: list of remote URLs; hosts[0] is the fetch URL
    :param name: suffix for the temporary remote name
    :raises Exception: if the working tree has uncommitted changes
    """
    with sh.pushd(cwd):
        name = "juicy_deploy_%s" % name
        # refuse to deploy uncommitted work
        if sh.git("status", "--porcelain").strip():
            raise Exception("Repository is UNCLEAN. Commit your changes")
        remote_list = sh.git("remote").strip().split()
        # drop any stale remote of the same name before re-adding it
        if name in remote_list:
            sh.git("remote", "remove", name)
        sh.git("remote", "add", name, hosts[0])
        if len(hosts) > 1:
            # NOTE(review): this re-adds hosts[0] as a push URL too
            for h in hosts:
                sh.git("remote", "set-url", name, "--push", "--add", h)
        sh.git("push", name, "master")
        sh.git("remote", "remove", name)
def grade_q2a(uniqname):
    """Grade question 2a: clone the student's conflict repo, check main.py and
    test.sh merged cleanly and run correctly. Returns (grade, html_text)."""
    try:
        repo, path = clone('q2a', uniqname, 'c4cs-w16-conflict1')
    except sh.ErrorReturnCode as e:
        # clone failed -- report the command output and award zero
        text = '''
<p><strong>Error! Failed to clone {}</strong></p>
<p>Ran command: <tt>{}</tt></p>
<p>stdout:</p>
<pre>
{}
</pre></p>
<p>stderr:</p>
<pre>
{}
</pre>
'''.format('c4cs-w16-conflict1', e.full_cmd, e.stdout.decode('utf8'), e.stderr.decode('utf8'))
        return 0, text
    with sh.pushd(path):
        # expected stdout of a correctly merged main.py
        golden = '''\
Welcome to the simple test program
According to current estimates, the diag construction will be done:
Summer 2017.
'''
        # expected stdout of test.sh
        test_golden = '''\
Success
'''
        text = ''
        grade = 0
        text += '<dd><strong>Merge content conflict [base 0.5]</strong></dd>\n'
        grade += 0.5
        if not os.path.exists('main.py'):
            text += '<dd>No <tt>main.py</tt> in repository [-0.5]</dd>'
            grade -= 0.5
        elif not os.path.exists('test.sh'):
            text += '<dd>No <tt>test.sh</tt> in repository [-0.5]</dd>'
            grade -= 0.5
        else:
            mainpy = open('main.py').read()
            testsh = open('test.sh').read()
            # leftover conflict markers mean the merge was not resolved
            if ('>>>>' in mainpy) or ('<<<<' in mainpy):
                text += '<dd>Unresolved conflict in <tt>main.py</tt>. Contents:<pre>{}</pre></dd>'.format(mainpy)
                grade -= 0.5
            elif ('>>>>' in testsh) or ('<<<<' in testsh):
                text += '<dd>Unresolved conflict in <tt>test.sh</tt>. Contents:<pre>{}</pre></dd>'.format(testsh)
                grade -= 0.5
            else:
                try:
                    out = sh.python('main.py')
                    test_out = sh.bash('test.sh')
                    if ('diag construction' not in out) or ('Summer 2017' not in out):
                        text += '<dd><tt>main.py</tt> content not merged, missing diag construction or completion date [-0.5]. Output of main.py:<pre>{}</pre></dd>'.format(out)
                        grade -= 0.5
                    else:
                        if out != golden:
                            text += '<dd>Output of main.py seems to be a merge, but not quite correct [-0.2]. Expected <pre>{}</pre>, student main.py output <pre>{}</pre>'.format(golden, out)
                            grade -= 0.2
                        if test_out != test_golden:
                            text += '<dd><tt>test.sh</tt> does not report success [-0.2].\nOutput of test.sh:<pre>{}</pre></dd>'.format(test_out)
                            grade -= 0.2
                except sh.ErrorReturnCode as e:
                    text += '<dd><tt>main.py</tt> or <tt>test.sh</tt> does not run [-0.5]. Output <pre>Ran {}\n\nStdout\n{}Stderr\n{}</pre></dd>'.format(
                        e.full_cmd, e.stdout.decode('utf8'), e.stderr.decode('utf8'))
                    grade -= 0.5
        # NOTE(review): float equality check; 0.5 - 0.2 - 0.2 would not hit
        # this branch anyway, but beware of accumulated float error.
        if grade == 0.5:
            text += '<dd>All correct!</dd>'
    # NOTE(review): q2a_entry is built but never used; perhaps it was meant
    # to be returned -- confirm with callers.
    q2a_entry = '''
<dt>Question 2a</dt>
<dd>{:1.1f}/0.5</dd>
<dl>
{}</dl>
'''.format(grade, text)
    return grade, text
# Silence sh's chatty INFO-level logging.
logging.getLogger("sh").setLevel(logging.WARNING)

BUILD_DIR = 'builds'
PRISTINE = os.path.join(BUILD_DIR, 'simulator')

# Start from scratch
rm('-r', '-f', BUILD_DIR)
mkdir(BUILD_DIR)

# No fuse in the CI environment means we need a unique directory for each
# variant. We first grab a copy of the simulator folder and wipe out all things
# tup to use as a template so that the same script will work locally and in CI
cp('-r', 'simulator', PRISTINE)
with sh.pushd(PRISTINE):
    rm('-r', '-f', '.tup')
    rm('-r', '-f', sh.glob('build-*'))

variants = {}
# One build directory per config file, each seeded from the pristine template.
for variant_file in os.listdir('simulator/configs'):
    log.info("Building {}".format(variant_file))
    variant = os.path.basename(variant_file)
    build_variant_dir = os.path.join(BUILD_DIR, variant)
    mkdir(build_variant_dir)
    cp('-r', PRISTINE, build_variant_dir)
    # NOTE(review): the body of this with-block continues beyond this chunk.
    with sh.pushd(os.path.join(build_variant_dir, 'simulator')):
def main():
    """CLI entry point for pypi-up: bump the package version, optionally git
    tag/push the release and upload to PyPI.

    NOTE(review): this is Python-2 era code (raw_input, ConfigParser module
    name, ex.message) -- porting to Python 3 would touch several lines.
    """
    try:
        prog = "pypi-up"
        desc = "%s %s" % (__title__, __version__)
        desc += " - pypi-up a simple command line tool to increase version number of package" \
                "and release on Pypi. Also Git Tag/Push the release"
        parser = argparse.ArgumentParser(prog=prog, description=desc)
        parser.add_argument("--setup", help="Setup PYPI-REL", action="store_true")
        parser.add_argument("-v", "--version", help="Show the current version", action="store_true")
        parser.add_argument("-p", "--patch", help="Increment PATCH version", action="store_true")
        parser.add_argument("-m", "--minor", help="Increment MINOR version and reset patch", action="store_true")
        parser.add_argument("-j", "--major", help="Increment MAJOR version and reset minor and patch", action="store_true")
        parser.add_argument("-e", "--edit", help="Manually enter the version number to bump", action="store")
        parser.add_argument("--dry", help="DRY RUN. To test the release, but it will not make any changes", action="store_true")
        parser.add_argument("-x", "--skip-prompt", help="Skip prompt", action="store_true")
        arg = parser.parse_args()
        config = ConfigParser.ConfigParser()
        print("-" * 80)
        print("=== PYPI Up ===")
        print("")
        if arg.setup:
            # --setup bootstraps the about file and setup.cfg section, then exits
            print("Setting up...")
            if not os.path.isfile(about_file):
                with open(about_file, "w+") as f:
                    f.write(about_file_content)
            if not os.path.isfile(setup_cfg):
                config.add_section(conf_section_name)
                config.set(conf_section_name, "version-file", "__about__.py")
                config.set(conf_section_name, "auto-increment", "patch")
                with open(setup_cfg, "w+") as f:
                    config.write(f)
            print("Done!")
            print("-" * 80)
            exit()
        with sh.pushd(CWD):
            # refuse to release uncommitted work
            if sh.git("status", "--porcelain").strip():
                raise Exception("Repository is UNCLEAN. Commit your changes")
            config.read(setup_cfg)
            version_file = config.get(conf_section_name, "version-file")
            version_file = os.path.join(CWD, version_file)
            if not os.path.isfile(version_file):
                raise Exception("version-file '%s' is required" % version_file)
            auto_inc = ""
            if config.has_option(conf_section_name, "auto-increment"):
                auto_inc = config.get(conf_section_name, "auto-increment").strip()
            rvnup = Reversionup(file=setup_cfg)
            old_version = rvnup.version

            def test_auto_inc(auto_inc_):
                # auto-increment from config applies only when no explicit
                # bump flag was passed on the command line
                return auto_inc == auto_inc_ and (not arg.patch and not arg.minor
                                                  and not arg.major and not arg.version
                                                  and not arg.edit)

            if arg.edit:
                rvnup.version = arg.edit
            elif arg.patch or test_auto_inc("patch"):
                rvnup.inc_patch()
            elif arg.minor or test_auto_inc("minor"):
                rvnup.inc_minor()
            elif arg.major or test_auto_inc("major"):
                rvnup.inc_major()
            elif arg.version:
                # -v only reports; it never bumps
                print("Current version: %s" % rvnup.version)
                print("-" * 80)
                exit()
            if arg.dry:
                print("** DRY RUNNING **")
                print("")
            print("* New version: %s " % rvnup.version)
            print("Old version: %s" % old_version)
            print("")
            if not arg.skip_prompt \
                    and raw_input("Continue with the release? (y | n) ").strip() == "n":
                print("** Release Aborted")
                print("-" * 80)
                exit()
            skip_tag = not arg.skip_prompt \
                and raw_input("Git Tag/Push release version? (y | n) ").strip().lower() == "n"
            skip_pypi = not arg.skip_prompt \
                and raw_input("Release to PYPI? (y | n) ").strip().lower() == "n"
            print("")
            if not arg.dry:
                # persist the bumped version to setup.cfg and the version file
                rvnup.write()
                replace_file_version(version_file, rvnup.version)
            if arg.dry or skip_tag:
                print("- Git Tag/Push release: skipped")
            else:
                tag_name = "v%s" % rvnup.version
                print("+ Git Tag release version: %s " % tag_name)
                sh.git("add", ".")
                sh.git("commit", "-m", "Tagged release: %s" % tag_name)
                sh.git("tag", "-a", tag_name, "-m", tag_name)
                print("+ Git Push release to repo ...")
                sh.git("push", "origin", "master")
                sh.git("push", "--tags")
            if arg.dry or skip_pypi:
                print("- Release to Pypi: skipped")
            else:
                print("+ Releasing to PYPI ...")
                sh.python("setup.py", "register", "-r", "pypi")
                sh.python("setup.py", "sdist", "upload", "-r", "pypi")
        print("-" * 80)
        print("")
    except Exception as ex:
        # NOTE(review): ex.message is Python-2 only; str(ex) on Python 3
        print("Error: %s" % ex.message)
        exit(1)
def grade_q1(uniqname, repo):
    """Grade week-12 dotfiles: 1 pt for a bashrc-like file, 1 pt for any other
    dotfile. Returns (grade, html_text)."""
    repo, path = clone('wk12-dotfiles', uniqname, repo)
    with sh.pushd(path):
        if os.path.exists(os.path.join(path, 'configs')):
            # Handle case of person who put stuff in folder..
            files = set(sh.ls('-a', 'configs').split())
        elif os.path.exists(os.path.join(path, 'bash')):
            # Handle case of person who put stuff in folder..
            files = set(sh.ls('-a', 'bash').split())
        elif os.path.exists(os.path.join(path, 'dotfiles')):
            # Handle case of person who put stuff in folder..
            files = set(sh.ls('-a', 'dotfiles').split())
        elif os.path.exists(os.path.join(path, 'shell')):
            # Handle case of person who put stuff in folder..
            files = set(sh.ls('-a', 'shell').split())
        else:
            files = set(sh.ls('-a').split())
        files.discard('.')
        files.discard('..')
        files.discard('.git')
        text = ''
        grade = 0
        # Viable .bashrc targets we've seen or are in the spirit of the Q
        bashrc = set((
            '.bashrc',
            'bashrc',
            'bash.bashrc',
            '.bash_profile',
            'bash_profile',
            '.zshrc',
            'zshrc',
            'mybashrc',
            'bilraj.dotfiles.bashrc',
        ))
        if len(files.intersection(bashrc)):
            # Bashrc file
            text += '<dd><strong>.bashrc [1.0/1.0]</strong></dd>\n'
            grade += 1.0
        else:
            text += '<dd><strong>.bashrc [0.0/1.0]</strong></dd>\n'
            text += '<dd>Output of <tt>find .</tt></dd>\n'
            text += '<dd><pre>{}</pre></dd>\n'.format(sh.find('.'))
            with grades_lock:
                no_bash.add(uniqname)
            grade += 0.0
        others = files - bashrc
        # NOTE(review): nonfiles is defined but never subtracted from `others`,
        # so READMEs/scripts can earn the "other dotfile" point -- confirm
        # whether `others = files - bashrc - nonfiles` was intended.
        nonfiles = set((
            'README', 'README.md', 'readme.md', 'README.txt', 'LICENSE',
            'LICENSE.txt', 'setupDotfiles.sh', '3', 'update.sh', 'system_spec',
            'init.sh', 'main.py', 'hello.sh', 'setupDotfiles.sh',
            'status_repos.py', 'link.sh', 'test', 'test1.cpp', 'test2.cpp',
            'test3.cpp', 'backup.sh', '.bashrc-miniconda3.bak', 'prog.cpp',
            'file1.txt', 'file2.txt', 'file3.txt', 'dotfiles_old',
            'setup_dotfiles.sh', 'source.sh', 'q2.py', 'games', 'duncanProf',
            'trial.py', 'clean.sh', 'bootstrap.sh', 'q3.py', 'a.cpp',
            'make_symlink.sh', 'update_repos.py', '__pycache__', 'test1',
            'shell', 'a.h', 'copy.sh', 'scripts', 'makesymlinks.sh', 'os',
            'set_up.sh', 'a', 'pro.py', 'link_files.sh', 'install.sh',
            'main.pyc',
        ))
        with grades_lock:
            for o in others:
                other_dotfiles.add(o)
        if len(others):
            # Any other dotfile
            text += '<dd><strong>Any other dotfile ({}) [1.0/1.0]</strong></dd>\n'.format(others.pop())
            grade += 1.0
        else:
            text += '<dd><strong>Any other dotfile [0.0/1.0]</strong></dd>\n'
            text += '<dd>Output of <tt>find .</tt></dd>\n'
            text += '<dd><pre>{}</pre></dd>\n'.format(sh.find('.'))
            grade += 0.0
        # NOTE(review): q1_entry is built but never used; perhaps it was
        # meant to be returned -- confirm with callers.
        q1_entry = '''
<dt>Week 12</dt>
<dd>{:1.1f}/2.0</dd>
<dl>
{}</dl>
'''.format(grade, text)
        return grade, text