def generate_thumbnail(input_path, output_path):
    """Extract a single downscaled frame from a video as a thumbnail image.

    Args:
        input_path: path of the source video handed to ffmpeg.
        output_path: path the thumbnail image is written to.
    """
    result = subprocess.run(
        ['ffmpeg',
         '-i', input_path,
         # scale to a width of 720px, keeping the aspect ratio
         '-filter:v', 'scale=720:-1',
         # skip first two seconds (in event of dark/black start)
         '-ss', '2',
         # quality is 5 (1-30)
         '-qscale:v', '5',
         # only capture one frame
         '-vframes', '1',
         output_path],
        capture_output=True)
    # Bug fix: the original printed the success message unconditionally, even
    # when ffmpeg failed and no thumbnail was written. Check the exit status.
    if result.returncode == 0:
        print(f'Created thumbnail at: {output_path}')
    else:
        print(f'Failed to create thumbnail at: {output_path}')
def __init__(self, path):
    """Create a new named pipe at *path*.

    Raises:
        FileExistsError: if anything already exists at *path*.
        subprocess.CalledProcessError: if mkfifo fails (check=True).
    """
    if os.path.exists(path):
        raise FileExistsError("Named pipe {} already exists.".format(path))
    # Security/correctness fix: the original ran 'mkfifo ' + path through a
    # shell, which breaks on paths containing spaces and allows shell
    # metacharacter injection. Pass an argument list with shell=False.
    run(['mkfifo', path], check=True)
    self.path = path
def main(gbdir, outdir):
    """BLAST each organism's spacers against its own genome, one JSON per accession."""
    os.makedirs(gbdir, exist_ok=True)
    os.makedirs(outdir, exist_ok=True)
    query_path = 'tempquery.fasta'
    db_path = 'tempdb.fasta'
    for organism in tqdm(Organism.objects.all()):
        # Fetch the GenBank record if it is not cached yet, then convert it
        # into the temporary FASTA database file.
        genbank_path = os.path.join(gbdir, '{}.gb'.format(organism.accession))
        if not os.path.isfile(genbank_path):
            print('\nFetching {} with accession {}'.format(
                organism.name, organism.accession
            ))
            fetch(genbank_path)
        SeqIO.convert(genbank_path, 'genbank', db_path, 'fasta')
        # Write the organism's spacers into a FASTA query file.
        spacer_records = Spacer.objects.filter(loci__organism=organism)
        with open(query_path, 'w') as handle:
            handle.write(''.join('>{}\n{}\n'.format(s.id, s.sequence)
                                 for s in spacer_records))
        # Run BLAST, emitting single-file JSON (-outfmt 15).
        result_path = os.path.join(outdir, '{}.json'.format(organism.accession))
        subprocess.run(
            ['blastn', '-query', query_path, '-subject', db_path,
             '-out', result_path, '-outfmt', '15'],
            stdout=subprocess.DEVNULL)
    # Remove the scratch files once every organism has been processed.
    os.remove(query_path)
    os.remove(db_path)
def cmd_restore(args):
    ''' Restore a given archive into all the container's volumes. '''
    print('\nrestoring {} for {}\n'.format(args.archive, args.container))
    # The repository must exist before anything can be restored from it.
    if not path.isdir(args.repository):
        raise BasementException('no backup to restore from')
    # The requested archive must exist inside that repository as well.
    archive_ref = '{}::{}'.format(args.repository, args.archive)
    info_result = run(['borg', 'info', archive_ref],
                      stdout=DEVNULL, stderr=DEVNULL)
    if info_result.returncode != 0:
        raise BasementException(
            'archive {} does not exist for this backup'.format(args.archive))
    if not args.no_remove:
        # Delete everything in the target mounts to prepare for a clean restore.
        for mount in (bind.split(':')[1] for bind in get_binds(args.container)):
            # Only empty directories, as file volumes will be overwritten.
            if path.isdir(mount):
                run('rm -rf {pth}/* {pth}/.* 2>/dev/null'.format(pth=mount),
                    shell=True)
    run(['borg', 'extract', archive_ref], cwd=DIR_BACKUPS)
def run_synthtool(ctx: Context) -> None:
    """Runs synthtool for the initial client generation."""
    client_dir = (ctx.root_directory / "google-cloud-clients"
                  / ctx.google_cloud_artifact)
    subprocess.run([sys.executable, "synth.py"], check=True, cwd=client_dir)
def test_pyplot_up_to_date():
    """Regenerate pyplot.py via tools/boilerplate.py and fail if it differs."""
    gen_script = Path(mpl.__file__).parents[2] / "tools/boilerplate.py"
    if not gen_script.exists():
        pytest.skip("boilerplate.py not found")
    pyplot_path = Path(plt.__file__)
    orig_contents = pyplot_path.read_text()
    try:
        subprocess.run([sys.executable, str(gen_script)], check=True)
        new_contents = pyplot_path.read_text()
        if orig_contents != new_contents:
            diff_msg = '\n'.join(difflib.unified_diff(
                orig_contents.split('\n'), new_contents.split('\n'),
                fromfile='found pyplot.py', tofile='expected pyplot.py',
                n=0, lineterm=''))
            pytest.fail(
                "pyplot.py is not up-to-date. Please run "
                "'python tools/boilerplate.py' to update pyplot.py. "
                "This needs to be done from an environment where your "
                "current working copy is installed (e.g. 'pip install -e'd). "
                "Here is a diff of unexpected differences:\n%s" % diff_msg)
    finally:
        # Always restore the on-disk pyplot.py, even when the check fails.
        pyplot_path.write_text(orig_contents)
def create_db_image(drucker):
    """Create database image from database container"""
    print(
        colorful.white_on_blue(
            "Committing %s image from %s container..."
            % (drucker.vars.DB_IMAGE, drucker.vars.DB_CONTAINER)
        )
    )
    # Robustness fix: build the docker command as an argument list instead of
    # interpolating values into a shell string — no quoting issues and no
    # shell needed. The commit message is unchanged.
    subprocess.run(
        [
            "docker", "commit",
            "-m", "%s on %s" % (drucker.vars.DB_CONTAINER, str(date.today())),
            drucker.vars.DB_CONTAINER,
            drucker.vars.DB_IMAGE,
        ]
    )
    print(colorful.white_on_blue("Deleting initial container..."))
    subprocess.getoutput(
        "docker rm -f %s > /dev/null 2>&1" % (drucker.vars.DB_CONTAINER)
    )
    # Recreate the container from the freshly committed image.
    create_db_container(drucker)
def tearDown(self):
    """Drop the API test database after each test run."""
    drop_cmd = [
        "mysql",
        "-u", "root",
        "-e", "DROP DATABASE charakoba_api;",
    ]
    shell.run(drop_cmd)
def path_source_reference(path_source_in_repo, variables):
    """
    Copy over media in repo to temp folder (this allows symlinking later)

    Some files are missing from the source set and need to be derived when
    this fixture is called
    """
    tempdir = tempfile.TemporaryDirectory()
    source_names = set(os.listdir(path_source_in_repo))
    for name in source_names:
        shutil.copy2(os.path.join(path_source_in_repo, name),
                     os.path.join(tempdir.name, name))
    # Derive other test media
    if 'test1.mp4' not in source_names:
        # TODO: use `variables` to aquire `cmd_ffmpeg`
        target = os.path.join(tempdir.name, 'test1.mp4')
        # Build a short video from the numbered PNG frames plus silent audio.
        subprocess.run(
            ('ffmpeg',
             '-f', 'image2', '-framerate', '0.1',
             '-i', os.path.join(path_source_in_repo, 'test1_%03d.png'),
             '-f', 'lavfi', '-i', 'anullsrc', '-shortest',
             '-c:a', 'aac', '-strict', 'experimental',
             '-r', '10', '-s', '640x480', target),
            stdout=subprocess.PIPE, stderr=subprocess.PIPE, timeout=20)
        assert os.path.isfile(target)
    if 'test2.ogg' not in source_names:
        # TODO: use `variables` to aquire `cmd_sox`
        target = os.path.join(tempdir.name, 'test2.ogg')
        # Generate 15 seconds of silence as a stand-in audio file.
        subprocess.run(
            ('sox', '-n', '-r', '44100', '-c', '2', '-L', target,
             'trim', '0.0', '15.000'),
            stdout=subprocess.PIPE, stderr=subprocess.PIPE, timeout=20)
        assert os.path.isfile(target)
    yield tempdir.name
    tempdir.cleanup()
def main_check_all():
    """Check the coverage for all files individually.

    This makes sure the files have 100% coverage without running unrelated
    tests.

    This runs pytest with the used executable, so check_coverage.py should be
    called with something like ./.tox/py36/bin/python.
    """
    for test_file, src_file in PERFECT_FILES:
        if test_file is None:
            continue
        # Run only this file's tests, collecting coverage for qutebrowser.
        subprocess.run(
            [sys.executable, '-m', 'pytest', '--cov', 'qutebrowser',
             '--cov-report', 'xml', test_file],
            check=True)
        with open('coverage.xml', encoding='utf-8') as f:
            messages = check(f, [(test_file, src_file)])
        os.remove('coverage.xml')
        insufficient = [msg for msg in messages
                        if msg.typ == MsgType.insufficent_coverage]
        # Fail fast on the first file below 100% coverage.
        if insufficient:
            for msg in insufficient:
                print(msg.text)
            return 1
        print("Check ok!")
    return 0
def runTask(tmpTask):
    """Dispatch a task name to its handler."""
    if tmpTask == "Add Multimedia to Server":
        tmpVar = readFile("Sorter", "", "", recordFile)
    elif tmpTask == "Open Web Browser":
        webbrowser.open("www.google.com")
        '''
        with urllib.request.urlopen("http://www.espn.com") as response:
            html = response.read()
            print(html)
        '''
    elif tmpTask == "Perform System Maintenance":
        # check to see if has admin rights: getuid() on POSIX, otherwise
        # fall back to the Windows shell API.
        try:
            is_admin = os.getuid() == 0
        except:
            is_admin = ctypes.windll.shell32.IsUserAnAdmin()
        if is_admin == 0:
            print("Please close program and 'Run as Administrator'!")
        else:
            if input("Run system cleanup?... yes/no: ").upper() == "YES":
                subprocess.run("cleanmgr")
            if input("Run defrag of local drives?... yes/no: ").upper() == "YES":
                defrag()
            if input("Check system files?... yes/no: ").upper() == "YES":
                subprocess.run("sfc /scannow")
    elif tmpTask == "Port Scanner":
        scanports()
    else:
        print("Could not complete that task... Contact Admin!")
def main_check():
    """Check coverage after a test run."""
    try:
        with open('coverage.xml', encoding='utf-8') as f:
            messages = check(f, PERFECT_FILES)
    except Skipped as e:
        # The check itself decided to skip; report and treat as clean.
        print(e)
        messages = []

    if messages:
        print()
        print()
        scriptutils.print_title("Coverage check failed")
        for msg in messages:
            print(msg.text)
        print()
        # Show the uncovered lines for exactly the offending source files.
        filters = ','.join('qutebrowser/' + msg.filename for msg in messages)
        subprocess.run(
            [sys.executable, '-m', 'coverage', 'report',
             '--show-missing', '--include', filters],
            check=True)
        print()
        print("To debug this, run 'tox -e py36-pyqt59-cov' "
              "(or py35-pyqt59-cov) locally and check htmlcov/index.html")
        print("or check https://codecov.io/github/qutebrowser/qutebrowser")
        print()

    if 'CI' in os.environ:
        print("Keeping coverage.xml on CI.")
    else:
        os.remove('coverage.xml')
    return 1 if messages else 0
def batchPdfConversion(SourceFolder, DestinationFolder):
    """Convert markdown notes to PDFs, then merge them.

    Every non-index ``.md`` file directly in SourceFolder, and in each of its
    first-level subfolders (skipping ``__*``, ``.*`` and ``assets``), is
    converted with pandoc into DestinationFolder (which is recreated from
    scratch). The resulting PDFs are then combined: top-level ones into
    ``notes.pdf`` and each subfolder's into ``<sanitized folder>.pdf``.
    """
    files = [file for file in os.listdir(SourceFolder)
             if os.path.splitext(file)[1] == ".md"
             and os.path.splitext(file)[0] != "index"]
    folders = [folder for folder in os.listdir(SourceFolder)
               if os.path.isdir(os.path.join(SourceFolder, folder))
               and not folder.startswith("__")
               and not folder.startswith(".")
               and folder != "assets"]
    # Recreate the destination from scratch.
    if os.path.exists(DestinationFolder):
        shutil.rmtree(DestinationFolder)
    os.makedirs(DestinationFolder)

    # ***create pdfs
    for file in files:
        _convert_md_to_pdf(
            os.path.join(SourceFolder, file),
            os.path.join(DestinationFolder, replaceMdByPdf(file)))
    for folder in folders:
        os.makedirs(os.path.join(DestinationFolder, folder))
        for file in os.listdir(os.path.join(SourceFolder, folder)):
            if (os.path.splitext(file)[1] == ".md"
                    and os.path.splitext(file)[0] != "index"):
                _convert_md_to_pdf(
                    os.path.join(SourceFolder, folder, file),
                    os.path.join(DestinationFolder, folder, replaceMdByPdf(file)),
                    highlight=True)

    # ***combine pdfs
    _merge_pdfs(DestinationFolder, "notes.pdf")
    # Bug fix: the original selected subfolders with ``folder.startswith("__")``
    # (and tested isdir against SourceFolder while listing DestinationFolder),
    # so the PDFs generated in subfolders above were never merged. Merge the
    # same subfolders the conversion step populated.
    for folder in folders:
        _merge_pdfs(os.path.join(DestinationFolder, folder),
                    sanitizeFoldername(folder) + ".pdf")
    print("=======PDfs generated========")


def _convert_md_to_pdf(src, dst, highlight=False):
    """Run pandoc to convert one markdown file *src* into the PDF *dst*."""
    name = os.path.basename(src)
    print("starting conversion: " + name + " to pdf...")
    command = ['pandoc',
               "--variable", "fontsize=14pt",
               "--variable", "documentclass=extarticle",
               src,
               '--latex-engine=xelatex',
               '--template=./assets/me.latex']
    if highlight:
        # Subfolder notes were converted with syntax highlighting enabled.
        command.append('--highlight-style=pygments')
    command += ['-o', dst]
    subprocess.run(command)
    print("conversion completed: " + name + " to pdf...")


def _merge_pdfs(directory, out_name):
    """Merge all PDFs directly inside *directory* into *directory*/*out_name*."""
    pdfs = [file for file in os.listdir(directory)
            if os.path.splitext(file)[1] == ".pdf"]
    if not pdfs:
        return
    merger = PyPDF2.PdfFileMerger()
    handles = []
    for filename in pdfs:
        print("combining " + filename)
        # Bug fix: the original opened each PDF without ever closing it.
        # Keep the handles open until write() (PyPDF2 reads lazily), then
        # close them all.
        handle = open(os.path.join(directory, filename), 'rb')
        handles.append(handle)
        merger.append(PyPDF2.PdfFileReader(handle))
        print("combined " + filename)
    merger.write(os.path.join(directory, out_name))
    merger.close()
    for handle in handles:
        handle.close()
def runBLAST(results_dir, queryfile):
    """Megablast *queryfile* against the genome DB named after *results_dir*.

    Writes results.xml (BLAST XML, -outfmt 5) inside results_dir.
    """
    genome_db = basename(results_dir)
    run(["blastn",
         "-task", "megablast",
         "-db", genome_db,
         "-outfmt", "5",
         "-max_target_seqs", "1",
         "-query", queryfile,
         "-out", "./results.xml"],
        cwd=results_dir)
def _send_xo_cmd(cmd_str):
    """Tokenize *cmd_str* shell-style and run it, raising on a non-zero exit."""
    LOGGER.info('Sending xo cmd')
    argv = shlex.split(cmd_str)
    # Capture both streams; check=True raises CalledProcessError on failure.
    subprocess.run(argv,
                   stdout=subprocess.PIPE,
                   stderr=subprocess.PIPE,
                   check=True)
def tikz2image(tikz_src, filetype, outfile):
    """Compile a TikZ snippet into outfile.<filetype> via lualatex.

    Args:
        tikz_src: TikZ source, wrapped here in a standalone document.
        filetype: 'pdf' copies the compiled PDF directly; any other value is
            handed to ImageMagick ``convert`` as the target extension.
        outfile: output path without extension.
    """
    tmpdir = mkdtemp()
    olddir = os.getcwd()
    os.chdir(tmpdir)
    try:
        # Write tikz.tex file.
        with open('tikz.tex', 'w') as f:
            f.write(
                '\n'.join(
                    [
                        "\\RequirePackage{luatex85,shellesc}"
                        , "\\documentclass{standalone}"
                        , "\\usepackage{tikz}"
                        , "\\usepackage[sfdefault]{firasans}"
                        , "\\usepackage[small,euler-digits]{eulervm}"
                        , "\\usepackage{pgfplots}"
                        , "\\pgfplotslibrary[]{units,groupplots}"
                        , "\\begin{document}"
                    ]
                ))
            f.write(tikz_src)
            f.write("\n\\end{document}\n")
        subprocess.run(
            ["latexmk", "-pdf", "-lualatex", '--shell-escape', '-silent',
             'tikz.tex'],
            stdout=sys.stderr)
    finally:
        # Bug fix: always restore the working directory; the original left
        # the process chdir'd into the temp dir if the write or latexmk
        # raised.
        os.chdir(olddir)
    try:
        if filetype == 'pdf':
            shutil.copyfile(tmpdir + '/tikz.pdf', outfile + '.pdf')
        else:
            subprocess.run(["convert", tmpdir + '/tikz.pdf',
                            outfile + '.' + filetype])
    finally:
        # Bug fix: remove the temp dir even when the copy/convert fails.
        shutil.rmtree(tmpdir)
def main():
    """Rebase every unmerged local branch onto master, aborting failed rebases."""
    # Check for clean local working tree
    status_res = run(['git', 'status', '--short'], stdout=PIPE)
    entries = [e.strip() for e in status_res.stdout.decode('utf-8').split('\n')
               if e]
    for entry in entries:
        # Bug fix: split only on the first space so paths containing spaces
        # don't raise ValueError during unpacking.
        status, _entry_path = entry.split(' ', 1)
        if status != '??':
            print('Working directory is not clean')
    # List unmerged Git branches
    branch_list_res = run(['git', 'branch', '--no-merged'], stdout=PIPE)
    if branch_list_res.returncode:
        # Bug fix: ``raise "..."`` (raising a plain string) is a TypeError in
        # Python 3 — raise a real exception instead.
        raise RuntimeError("Listing remote branches failed")
    branch_list = [b.decode('utf-8').strip()
                   for b in branch_list_res.stdout.strip().split(b'\n')]
    # Rebase each branch in turn
    onto_branch = 'master'
    for branch in branch_list:
        co_result = run(['git', 'checkout', branch], stdout=PIPE)
        if co_result.returncode:
            print('{} - Checkout failed'.format(branch))
            return
        rebase_result = run(['git', 'rebase', onto_branch], stdout=PIPE)
        if rebase_result.returncode:
            # Rebase hit conflicts: abort it so the tree stays usable.
            abort_result = run(['git', 'rebase', '--abort'])
            if abort_result.returncode:
                print('Rebasing {} failed'.format(abort_result))
                return
            print('{} - Auto-rebase failed'.format(branch))
        else:
            print('{} - Rebased'.format(branch))
def make_upload(test=True):
    """Upload to PyPI or test.pypi"""
    if test:
        url = 'https://test.pypi.org'
        cmd = ['make', 'test-upload']
    else:
        url = 'https://pypi.org'
        cmd = ['make', 'upload']
    click.confirm(
        "Ready to upload release to %s?" % url, default=True, abort=True
    )
    # Retry the upload until it succeeds; for the real PyPI a declined retry
    # aborts, for test.pypi it keeps trying.
    while True:
        try:
            run(cmd, check=True)
        except CalledProcessError as exc_info:
            click.confirm(
                "Failed to upload: %s. Try again?" % str(exc_info),
                default=True,
                abort=(not test),
            )
        else:
            break
    click.confirm(
        "Please check release on %s. Continue?" % url,
        default=True,
        abort=True,
    )
def main():
    """Edit an encrypted file: decrypt, open an editor, re-encrypt on change."""
    args = parseArgs()
    passwd = readpass(confirm=args.new)
    if args.new:
        makeNewVeil(args.file, passwd)
    editDir = tempfile.mkdtemp()
    editFile = os.path.join(editDir, 'edit')
    decrypt(args.file, editFile, passwd)
    try:
        with tempfile.TemporaryDirectory() as origDir:
            # Keep a pristine copy so edits can be detected and diffed.
            origFile = os.path.join(origDir, 'orig')
            shutil.copyfile(editFile, origFile)
            subprocess.run([args.editor, editFile], check=True)
            if sameFileContent(origFile, editFile):
                print(args.file, 'not changed.')
            else:
                subprocess.run([args.diff, origFile, editFile], check=True)
                if confirmOverwrite(args.file):
                    encrypt(editFile, args.file, passwd)
                    print(args.file, 'overwritten.')
                else:
                    print('Discarded changes to {}.'.format(args.file))
    except:
        # Leave the decrypted copy on disk for recovery, then re-raise.
        print('Preserving file:', editFile)
        raise
    else:
        shutil.rmtree(editDir)
def make_and_push_tag(version):
    """Tag the current commit and push that tag to origin"""
    click.confirm(
        "Push tag '%s' to origin?" % version, default=True, abort=True
    )
    tag_name = "v%s" % version
    # -s creates a GPG-signed tag.
    run(['git', 'tag', "-s", tag_name], check=True)
    run(['git', 'push', '--tags', 'origin'], check=True)
def build_windows():
    """Build windows executables/setups."""
    utils.print_title("Updating 3rdparty content")
    update_3rdparty.run(ace=False, pdfjs=True, fancy_dmg=False)

    utils.print_title("Building Windows binaries")
    parts = str(sys.version_info.major), str(sys.version_info.minor)
    ver = ''.join(parts)
    dot_ver = '.'.join(parts)

    # Get python path from registry if possible
    try:
        reg64_key = winreg.OpenKeyEx(
            winreg.HKEY_LOCAL_MACHINE,
            r'SOFTWARE\Python\PythonCore' r'\{}\InstallPath'.format(dot_ver))
        python_x64 = winreg.QueryValueEx(reg64_key, 'ExecutablePath')[0]
    except FileNotFoundError:
        # Fall back to the conventional per-version install path.
        python_x64 = r'C:\Python{}\python.exe'.format(ver)

    out_pyinstaller = os.path.join('dist', 'qutebrowser')
    out_64 = os.path.join(
        'dist', 'qutebrowser-{}-x64'.format(qutebrowser.__version__))
    artifacts = []

    from scripts.dev import gen_versioninfo
    utils.print_title("Updating VersionInfo file")
    gen_versioninfo.main()

    utils.print_title("Running pyinstaller 64bit")
    _maybe_remove(out_64)
    call_tox('pyinstaller', '-r', python=python_x64)
    shutil.move(out_pyinstaller, out_64)

    utils.print_title("Running 64bit smoke test")
    smoke_test(os.path.join(out_64, 'qutebrowser.exe'))

    utils.print_title("Building installers")
    subprocess.run(['makensis.exe', '/DX64',
                    '/DVERSION={}'.format(qutebrowser.__version__),
                    'misc/qutebrowser.nsi'],
                   check=True)

    name_64 = 'qutebrowser-{}-amd64.exe'.format(qutebrowser.__version__)
    artifacts += [
        (os.path.join('dist', name_64),
         'application/vnd.microsoft.portable-executable',
         'Windows 64bit installer'),
    ]

    utils.print_title("Zipping 64bit standalone...")
    name = 'qutebrowser-{}-windows-standalone-amd64'.format(
        qutebrowser.__version__)
    shutil.make_archive(name, 'zip', 'dist', os.path.basename(out_64))
    artifacts.append(('{}.zip'.format(name), 'application/zip',
                      'Windows 64bit standalone'))

    return artifacts
def install_app_images():
    """Install all applications that are not available in apt by default."""
    # download_directory = '/home/tom/'
    file_extension = '.AppImage'
    # balenaEtcher (https://github.com/balena-io/etcher)
    print('******etcher******')
    # TODO compare versions before downloading.
    # Download the latest AppImage from github
    etcher_appname = 'etcher'
    etcher_url = 'https://github.com/balena-io/etcher/'
    releases_url = 'releases/latest/download/'
    reference_url = 'latest-linux.yml'
    request_url = etcher_url + releases_url + reference_url
    # download yml content and convert to a string
    yml = requests.get(request_url).content.decode()
    # Pick the AppImage artifact name out of the release metadata: lines of
    # the form '  url: <name>.AppImage'.
    for line in yml.split('\n'):
        if line[4:8] == 'url:':
            appimage_url = line[9:]
    request_url = etcher_url + releases_url + appimage_url
    output_file = etcher_appname + file_extension
    run(['wget', '-q', '--show-progress', request_url, '-O', output_file])
def runcli(self, infile=None, timeout=None):
    """Execute ``self.cli`` with an optional stdin file and timeout.

    Used to check expectedoutput.

    Returns a ``(ok, info)`` tuple:
      * ``False, info`` on a platform problem; ``info['stdout']`` holds the
        reason, ``info['stderr']`` the exception.
      * ``True, info`` on a run (possibly timed out); ``info`` carries
        ``stdout``, ``stderr``, ``success`` (exit status == 0) and ``cp``
        (the CompletedProcess), or ``timeout``/``result`` keys on timeout.
    """
    try:
        if infile:
            # Bug fix: the original opened the input file and never closed
            # it; read it inside a context manager instead.
            with open(infile, "rb") as entry:
                stdin_data = entry.read()
            cp = subprocess.run(self.cli, input=stdin_data,
                                stdout=subprocess.PIPE,
                                stderr=subprocess.PIPE,
                                timeout=timeout)
        else:
            cp = subprocess.run(self.cli, stdin=subprocess.DEVNULL,
                                stdout=subprocess.PIPE,
                                stderr=subprocess.PIPE,
                                timeout=timeout)
        return True, {"stderr": cp.stderr.decode("utf-8"),
                      "success": (cp.returncode == 0),
                      "stdout": cp.stdout.decode("utf-8"),
                      "cp": cp}
    except subprocess.TimeoutExpired as toe:
        return True, {"stderr": toe, "result": False,
                      "stdout": "temps d'execution trop long",
                      "timeout": True}
    except (OSError, IOError) as e:
        return False, {"stderr": e, "result": False,
                       "stdout": "PlateForme IO ERROR"}
    except Exception as e:
        return False, {"stderr": e, "result": False,
                       "stdout": "UnKown ERROR"}
def install_apt():
    """Install applications that are available in default apt."""
    print('******apt install******')
    applications = [
        'git',                 # Version control.
        'geary',               # non-gmail email
        'evolution',           # gmail email.
        'quodlibet',           # Music player.
        'gnome-tweaks',        # Finer grained destkop style editing.
        'chrome-gnome-shell',  # App linking firefox with tweaks.
        'syncthing',           # Point-to-point back-up manager.
        'gnome-shell-timer',   # App for focussing productivity.
        'transmission-gtk',    # Torrent downloader.
        'texmaker',            # LaTeX editor (for editing resume).
        'asunder',             # Music CD ripping.
        'baobab',              # Disk usage analysis.
        'nautilus-dropbox',    # Cloud storage client
        'python3-pip',         # Python package manager
        'gnome-mines',         # Minesweeper game.
        'gnome-calendar',      # calendar app.
        # 'atom',              # requires custom atom repo
    ]
    for app in applications:
        print('******' + app + '******')
        run(['sudo', 'apt', 'install', app])
def send_inputs(device, U):
    """
    Sends input values to the microcontroller to actuate them
    """
    # Offset each requested input by its operating point from the global U0.
    Vn = U[0] + U0['v'][0]
    Fn = U[1] + U0['f'][0]
    Qn = U[2] + U0['q'][0]
    # Bug fix: the original read ``Ug[3]`` — an undefined name. The pattern
    # of the other three inputs (U[0], U[1], U[2]) shows this must be U[3].
    Dn = U[3] + U0['d'][0]
    device.reset_input_buffer()
    # Send an empty line first, then one setpoint at a time with a 200 ms
    # pause so the microcontroller can process each command.
    subprocess.run('echo "" > /dev/arduino', shell=True)
    time.sleep(0.200)
    subprocess.run('echo "v,{:.2f}" > /dev/arduino'.format(Vn), shell=True)
    time.sleep(0.200)
    subprocess.run('echo "f,{:.2f}" > /dev/arduino'.format(Fn), shell=True)
    time.sleep(0.200)
    subprocess.run('echo "q,{:.2f}" > /dev/arduino'.format(Qn), shell=True)
    time.sleep(0.200)
    subprocess.run('echo "d,{:.2f}" > /dev/arduino'.format(Dn), shell=True)
    print("input values: {:.2f},{:.2f},{:.2f},{:.2f}".format(Vn, Fn, Qn, Dn))
def nginx_start():
    """Start nginx, or restart it if the pid-file process is already nginx."""
    pid = read_file('/var/run/nginx.pid', '-1')
    cmdline = read_file('/proc/' + pid + '/cmdline')
    # If the recorded pid belongs to a running nginx process, restart instead.
    if 'nginx' in cmdline:
        nginx_restart()
    else:
        subprocess.run('nginx', shell=True)
def get_battery(colors):
    """Render the battery status segment into ``bar.battery``.

    Reads adapter/battery state from sysfs, picks a glyph for the charge
    level (cycling upwards while charging to animate), emits a low-battery
    notification periodically, and writes the colorized text to the global
    ``bar`` object.
    """
    try:
        # NOTE(review): the sysfs names ACAD/BAT1 are hardware-specific —
        # other machines expose e.g. AC0/BAT0; confirm before reusing.
        with open("/sys/class/power_supply/ACAD/online", "r") as f:
            adapter_online = int(f.readline().strip())
        with open("/sys/class/power_supply/BAT1/capacity", "r") as f:
            percent = f.readline().strip()
        out = ""
        # Glyphs indexed by charge decile (0-100% -> index 0-10).
        levels = ["\uf08e","\uf07a", "\uf07b", "\uf07c", "\uf07d", "\uf07e",
                  "\uf07f", "\uf080", "\uf081", "\uf082", "\uf079"]
        icon = levels[(int(percent)//10)]
        if adapter_online:
            # On AC power: step the icon upward each call to animate charging,
            # wrapping back to the current charge decile.
            global charg_step
            icon = levels[charg_step]
            if charg_step < 10:
                charg_step += 1
            else:
                charg_step = int(percent)//10
        if int(percent) < 10:
            # Low battery: use the error color and notify every 10th call.
            global warning
            color = colors['ERROR_FG']
            if warning == 10:
                subprocess.run(('notify-send', '-t', '2', '-u', 'critical',
                                'Battery Low!\nBattery at {}%'.format(percent)))
                warning = 0
            warning += 1
        else:
            color = colors['DEFAULT_FG']
        out += "{} {}%".format(icon, percent)
    except FileNotFoundError:
        # No battery sysfs entries (e.g. a desktop machine).
        out = "No battery"
        color = colors['ERROR_FG']
    bar.battery = colorize(out, color)
def tqdb_prepare():
    """Locate the Titan Quest install via the registry and extract its archives."""
    # Open the TQAE key and grab the install location:
    try:
        tqae_key = winreg.OpenKey(
            winreg.HKEY_LOCAL_MACHINE, LOOKUP_KEY, 0, winreg.KEY_READ)
        install = winreg.QueryValueEx(tqae_key, 'InstallLocation')[0]
    except WindowsError:
        print('Could not find installation directory for Titan Quest')
        return
    # Create the required directories if necessary
    for directory in DIRECTORIES:
        Path(directory).mkdir(parents=True, exist_ok=True)
    # Run the extraction commands:
    archive_tool = Path(install, 'ArchiveTool.exe')
    for command in COMMANDS:
        subprocess.run([
            # ArchiveTool.exe in the TQ Install directory
            str(archive_tool),
            # Resource ARC file in the TQ Install directory
            str(Path(install, command[0])),
            # Extract flag for the ArchiveTool executable
            '-extract',
            # Output directory (local data/ dir)
            str(Path(command[1]).absolute()),
        ])
def commit(message, allow_empty = False):
    """Create a git commit with *message*; optionally allow an empty commit."""
    # Build
    git_args = ["git", "commit", "-m", message]
    if allow_empty:
        git_args += ["--allow-empty"]
    # Run
    run(git_args, check = True)
def establish_repo(repo_tag, candidate_spec, user_spec):
    """Clone the repo for *repo_tag* and check out its commitish, unless the
    candidate spec already supplies a local path."""
    repo_tag_base = repo_tag.split('_')[0]
    log.debug('')
    log.info(repo_tag_base.title() + ' repo')
    # Guard clause: a pre-set local_path means nothing to clone.
    if candidate_spec[repo_tag]['local_path'] is not None:
        candidate_spec[repo_tag]['local_path_setby'] = 'candidate spec'
        return
    # The case when local_path is not set.
    candidate_spec[repo_tag]['local_path_setby'] = 'fork & commitish'
    dir_for_clone = Path(candidate_spec['repos_dir'] + '/' + repo_tag_base)
    candidate_spec[repo_tag]['local_path'] = dir_for_clone
    # Start from a pristine directory before cloning.
    if dir_for_clone.exists():
        delete_dir_and_contents(dir_for_clone)
    Path.mkdir(dir_for_clone, parents=True)
    clone_repo(repo_tag, candidate_spec, user_spec, dir_for_clone)
    # check out the commitish (default: master)
    commitish = candidate_spec[repo_tag]['commitish']
    if commitish is None:
        commitish = 'master'
    log.debug('Checking out commitish: ' + commitish)
    subprocess.run(['git', 'checkout', commitish], cwd=dir_for_clone)
    git_log = subprocess.run(['git', 'log', '-n1'],
                             stdout=subprocess.PIPE, cwd=dir_for_clone)
    log.debug(git_log.stdout.decode('utf-8'))