def establish_repo(repo_tag, candidate_spec, user_spec):

    repo_tag_base = repo_tag.split('_')[0]
    log.debug('')
    log.info(repo_tag_base.title() + ' repo')

    if candidate_spec[repo_tag]['local_path'] is None:

        # The case when local_path is not set.
        candidate_spec[repo_tag]['local_path_setby'] = 'fork & commitish'
        dir_for_clone = Path(candidate_spec['repos_dir'] + '/' + repo_tag_base)
        #print('dir_for_clone: ',dir_for_clone)

        candidate_spec[repo_tag]['local_path'] = dir_for_clone

        if dir_for_clone.exists():
            delete_dir_and_contents(dir_for_clone)
        Path.mkdir(dir_for_clone, parents=True)

        clone_repo(repo_tag, candidate_spec, user_spec, dir_for_clone)

        # check out the commitish
        commitish = candidate_spec[repo_tag]['commitish']
        if commitish is None:
            commitish = 'master'

        log.debug('Checking out commitish: '+commitish)
        subprocess.run(['git', 'checkout', commitish], cwd=dir_for_clone)
        git_log = subprocess.run(['git', 'log', '-n1'], stdout=subprocess.PIPE, cwd=dir_for_clone)
        log.debug(git_log.stdout.decode('utf-8'))

    else:

        candidate_spec[repo_tag]['local_path_setby'] = 'candidate spec'
Example #2
def commit(message, allow_empty = False):
    # Build
    cmd = ["git", "commit", "-m", message]
    if allow_empty:
        cmd.append("--allow-empty")
    # Run
    run(cmd, check = True)
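
A minimal usage sketch; the messages are illustrative, and `run` is assumed to be `subprocess.run` imported elsewhere in the module:

commit("Update changelog")                      # ordinary commit
commit("Trigger CI rebuild", allow_empty=True)  # empty commit, e.g. to re-run CI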
Example #3
def get_battery(colors):
    try:
        with open("/sys/class/power_supply/ACAD/online", "r") as f:
            adapter_online = int(f.readline().strip())
        with open("/sys/class/power_supply/BAT1/capacity", "r") as f:
            percent = f.readline().strip()
        out = ""
        levels = ["\uf08e","\uf07a", "\uf07b", "\uf07c", "\uf07d", "\uf07e", "\uf07f", "\uf080", "\uf081", "\uf082", "\uf079"]
        icon = levels[(int(percent)//10)]
        if adapter_online:
            global charg_step
            icon = levels[charg_step]
            if charg_step < 10:
                charg_step+=1
            else:
                charg_step = int(percent)//10
        if int(percent) < 10:
            global warning
            color = colors['ERROR_FG']
            if warning == 10:
                subprocess.run(('notify-send','-t','2','-u','critical','Battery Low!\nBattery at {}%'.format(percent)))
                warning = 0
            warning += 1
        else:
            color = colors['DEFAULT_FG']
        out += "{} {}%".format(icon, percent)
    except FileNotFoundError:
        out = "No battery"
        color = colors['ERROR_FG']
    bar.battery = colorize(out, color)
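
The function mutates two module-level counters that the snippet assumes exist; a minimal sketch of that state:

# Assumed module-level globals (not shown in the snippet above).
charg_step = 0  # current frame of the charging animation (0-10)
warning = 0     # calls since the last low-battery notification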
Example #4
def generate_thumbnail(input_path, output_path):
    # scale to 720:x
    # quality is 5 (1-30)
    # skip first two seconds (in event of dark/black start)
    # only capture one frame
    subprocess.run(['ffmpeg', '-i', input_path, '-filter:v', 'scale=720:-1', '-ss', '2', '-qscale:v', '5', '-vframes', '1', output_path], capture_output=True)
    print(f'Created thumbnail at: {output_path}')
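
Because `capture_output=True` swallows ffmpeg's output, the success message prints even when ffmpeg fails. A hedged variant that surfaces errors:

result = subprocess.run(['ffmpeg', '-i', input_path, '-filter:v', 'scale=720:-1',
                         '-ss', '2', '-qscale:v', '5', '-vframes', '1', output_path],
                        capture_output=True)
if result.returncode == 0:
    print(f'Created thumbnail at: {output_path}')
else:
    print(result.stderr.decode())  # show ffmpeg's complaint instead of a false success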
Example #5
def main(gbdir, outdir):
    os.makedirs(gbdir, exist_ok=True)
    os.makedirs(outdir, exist_ok=True)
    tempq = 'tempquery.fasta'
    tempdb = 'tempdb.fasta'
    for org in tqdm(Organism.objects.all()):
        # get genbank and convert to fasta
        fpath = os.path.join(gbdir, '{}.gb'.format(org.accession))
        if not os.path.isfile(fpath):
            print('\nFetching {} with accession {}'.format(
                org.name,
                org.accession
            ))
            fetch(fpath)
        SeqIO.convert(fpath, 'genbank', tempdb, 'fasta')
        # get spacers of organism and convert to fasta
        spacers = Spacer.objects.filter(loci__organism=org)
        fastatext = ''.join(['>{}\n{}\n'.format(spacer.id, spacer.sequence)
                             for spacer in spacers])
        with open(tempq, 'w') as f:
            f.write(fastatext)
        # run blast and save output
        outpath = os.path.join(outdir, '{}.json'.format(org.accession))
        commandargs = ['blastn', '-query', tempq,
                       '-subject', tempdb, '-out', outpath, '-outfmt', '15']
        subprocess.run(commandargs, stdout=subprocess.DEVNULL)

    os.remove(tempq)
    os.remove(tempdb)
Example #6
def send_inputs(device,U):
  """
  Sends input values to the microcontroller to actuate them
  """
  Vn = U[0]+U0['v'][0]
  Fn = U[1]+U0['f'][0]
  Qn = U[2]+U0['q'][0]
  Dn = U[3]+U0['d'][0]
  input_string='echo "v,{:.2f}" > /dev/arduino && echo "f,{:.2f}" > /dev/arduino && echo "q,{:.2f}" > /dev/arduino'.format(Vn, Fn, Qn)
  #subprocess.run('echo -e "v,{:.2f}\nf,{:.2f}\nq,{:.2f}" > /dev/arduino'.format(U[:,0][0]+8, U[:,1][0]+16, U[:,2][0]+1.2), shell=True)
  device.reset_input_buffer()
  #device.write("v,{:.2f}\n".format(Vn).encode('ascii'))
  subprocess.run('echo "" > /dev/arduino', shell=True)
  time.sleep(0.200)
  subprocess.run('echo "v,{:.2f}" > /dev/arduino'.format(Vn), shell=True)
  time.sleep(0.200)
  #device.write("f,{:.2f}\n".format(Fn).encode('ascii'))
  subprocess.run('echo "f,{:.2f}" > /dev/arduino'.format(Fn), shell=True)
  time.sleep(0.200)
  #device.write("q,{:.2f}\n".format(Qn).encode('ascii'))
  subprocess.run('echo "q,{:.2f}" > /dev/arduino'.format(Qn), shell=True)
  #subprocess.call(input_string,  shell=True)
  #print("input: {}".format(input_string))
  time.sleep(0.200)
  subprocess.run('echo "d,{:.2f}" > /dev/arduino'.format(Dn), shell=True)
  print("input values: {:.2f},{:.2f},{:.2f},{:.2f}".format(Vn,Fn,Qn,Dn))
Example #7
def __init__(self, path):
    """Create a new named pipe."""
    if os.path.exists(path):
        raise FileExistsError("Named pipe {} already exists.".format(path))
    cmd = 'mkfifo ' + path
    run(cmd, shell=True, check=True)
    self.path = path
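
For reference, the standard library exposes the same primitive directly, which avoids `shell=True`; a sketch of an equivalent constructor:

import os

def __init__(self, path):
    """Create a new named pipe without invoking a shell."""
    if os.path.exists(path):
        raise FileExistsError("Named pipe {} already exists.".format(path))
    os.mkfifo(path)  # stdlib equivalent of `mkfifo <path>`
    self.path = path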
Example #8
	def runcli(self,infile=None,timeout=None):
		"""
		Used to check expectedoutput
		execute self.cli with input file (if any) and time (if any)
		return False,dico in case of a plateforme problem indication are in the dico stdout : reason, stderr: exception
		return True,dico in case of good execution (eventualy a time out)
		 stdout: stdout, stderr:stderr result: exit value
		 timeout: time_ok_or_not
		"""

		try:
			if infile:
				entry = open(infile, "rb")
				cp = subprocess.run(self.cli, input=entry.read(),
					stdout=subprocess.PIPE,stderr=subprocess.PIPE,
					timeout=timeout)
			else:
				cp = subprocess.run(self.cli,
					stdin=subprocess.DEVNULL,
					stdout=subprocess.PIPE,
					stderr=subprocess.PIPE,
					timeout=timeout)
			return True,{ "stderr":cp.stderr.decode("utf-8"),"success":(cp.returncode==0),"stdout":cp.stdout.decode("utf-8"),"cp":cp}
		except subprocess.TimeoutExpired as toe:
			return True,{"stderr":toe,"result":False,"stdout":"temps d'execution trop long", "timeout":True }
		except (OSError, IOError) as e:
			return False,{"stderr":e,"result":False,"stdout":"PlateForme IO ERROR"}
		except Exception as e:
			return False,{"stderr":e,"result":False,"stdout":"UnKown  ERROR"}
Example #9
def run_synthtool(ctx: Context) -> None:
    """Runs synthtool for the initial client generation."""
    subprocess.run(
        [sys.executable, "synth.py"],
        check=True,
        cwd=ctx.root_directory / "google-cloud-clients" / ctx.google_cloud_artifact
    )
Example #10
def build_windows():
    """Build windows executables/setups."""
    utils.print_title("Updating 3rdparty content")
    update_3rdparty.run(ace=False, pdfjs=True, fancy_dmg=False)

    utils.print_title("Building Windows binaries")
    parts = str(sys.version_info.major), str(sys.version_info.minor)
    ver = ''.join(parts)
    dot_ver = '.'.join(parts)

    # Get python path from registry if possible
    try:
        reg64_key = winreg.OpenKeyEx(winreg.HKEY_LOCAL_MACHINE,
                                     r'SOFTWARE\Python\PythonCore'
                                     r'\{}\InstallPath'.format(dot_ver))
        python_x64 = winreg.QueryValueEx(reg64_key, 'ExecutablePath')[0]
    except FileNotFoundError:
        python_x64 = r'C:\Python{}\python.exe'.format(ver)

    out_pyinstaller = os.path.join('dist', 'qutebrowser')
    out_64 = os.path.join('dist',
                          'qutebrowser-{}-x64'.format(qutebrowser.__version__))

    artifacts = []

    from scripts.dev import gen_versioninfo
    utils.print_title("Updating VersionInfo file")
    gen_versioninfo.main()

    utils.print_title("Running pyinstaller 64bit")
    _maybe_remove(out_64)
    call_tox('pyinstaller', '-r', python=python_x64)
    shutil.move(out_pyinstaller, out_64)

    utils.print_title("Running 64bit smoke test")
    smoke_test(os.path.join(out_64, 'qutebrowser.exe'))

    utils.print_title("Building installers")
    subprocess.run(['makensis.exe',
                    '/DX64',
                    '/DVERSION={}'.format(qutebrowser.__version__),
                    'misc/qutebrowser.nsi'], check=True)

    name_64 = 'qutebrowser-{}-amd64.exe'.format(qutebrowser.__version__)

    artifacts += [
        (os.path.join('dist', name_64),
         'application/vnd.microsoft.portable-executable',
         'Windows 64bit installer'),
    ]

    utils.print_title("Zipping 64bit standalone...")
    name = 'qutebrowser-{}-windows-standalone-amd64'.format(
        qutebrowser.__version__)
    shutil.make_archive(name, 'zip', 'dist', os.path.basename(out_64))
    artifacts.append(('{}.zip'.format(name),
                      'application/zip',
                      'Windows 64bit standalone'))

    return artifacts
Example #11
def cmd_restore(args):
	'''
		Restore a given archive into all the container's volumes.
	'''

	print('\nrestoring {} for {}\n'.format(args.archive, args.container))

	# Ensure that the repository exists
	if not path.isdir(args.repository):
		raise BasementException('no backup to restore from')

	# Ensure that the *archive* exists
	if run(
		['borg', 'info', '{}::{}'.format(args.repository, args.archive)],
		stdout=DEVNULL,
		stderr=DEVNULL
	).returncode != 0:
		raise BasementException('archive {} does not exist for this backup'.format(args.archive))

	if not args.no_remove:
		# Delete everything in the target mounts to prepare for a clean restore.
		mounts = map(lambda m: m.split(':')[1], get_binds(args.container))
		for m in mounts:
			# Only empty directories, as file volumes will be overwritten.
			if path.isdir(m):
				# print('rm -rf {pth}/* {pth}/.*'.format(pth=m))
				run('rm -rf {pth}/* {pth}/.* 2>/dev/null'.format(pth=m), shell=True)

	run([
		'borg',
		'extract',
		'{}::{}'.format(args.repository, args.archive)
	], cwd=DIR_BACKUPS)
Example #12
def main():
    args = parseArgs()
    passwd = readpass(confirm=args.new)
    if args.new:
        makeNewVeil(args.file, passwd)

    editDir = tempfile.mkdtemp()
    editFile = os.path.join(editDir, 'edit')
    decrypt(args.file, editFile, passwd)
    try:
        with tempfile.TemporaryDirectory() as d:
            origFile = os.path.join(d, 'orig')
            shutil.copyfile(editFile, origFile)

            subprocess.run([args.editor, editFile], check=True)

            if sameFileContent(origFile, editFile):
                print(args.file, 'not changed.')
            else:
                subprocess.run([args.diff, origFile, editFile], check=True)
                if confirmOverwrite(args.file):
                    encrypt(editFile, args.file, passwd)
                    print(args.file, 'overwritten.')
                else:
                    print('Discarded changes to {}.'.format(args.file))
    except:
        print('Preserving file:', editFile)
        raise
    else:
        shutil.rmtree(editDir)
Example #13
def create_db_image(drucker):
    """Create database image from database container"""
    print(
        colorful.white_on_blue(
            "Committing %s image from %s container..."
            % (drucker.vars.DB_IMAGE, drucker.vars.DB_CONTAINER)
        )
    )

    subprocess.run(
        'docker commit -m "%s on %s" %s %s'
        % (
            drucker.vars.DB_CONTAINER,
            str(date.today()),
            drucker.vars.DB_CONTAINER,
            drucker.vars.DB_IMAGE,
        ),
        shell=True,
    )

    print(colorful.white_on_blue("Deleting initial container..."))
    subprocess.getoutput(
        "docker rm -f %s > /dev/null 2>&1" % (drucker.vars.DB_CONTAINER)
    )
    create_db_container(drucker)
Example #14
def main():
    # Check for clean local working tree
    status_res = run(['git', 'status', '--short'], stdout=PIPE)
    entries = [e.strip() for e in status_res.stdout.decode('utf-8').split('\n') if e]
    for entry in entries:
        [status, path] = entry.split(' ')
        if status != '??':
            print('Working directory is not clean')


    # List unmerged Git branches
    branch_list_res = run(['git', 'branch', '--no-merged'], stdout=PIPE)
    if branch_list_res.returncode:
        raise "Listing remote branches failed"

    branch_list = [b.decode('utf-8').strip()
        for b in branch_list_res.stdout.strip().split(b'\n')]

    # Rebase each branch in turn
    onto_branch = 'master'
    for branch in branch_list:
        co_result = run(['git', 'checkout', branch], stdout=PIPE)
        if co_result.returncode:
            print('{} - Checkout failed'.format(branch))
            return

        rebase_result = run(['git', 'rebase', onto_branch], stdout=PIPE)
        if rebase_result.returncode:
            abort_result = run(['git', 'rebase', '--abort'])
            if abort_result.returncode:
                print('Aborting rebase of {} failed'.format(branch))
                return
            print('{} - Auto-rebase failed'.format(branch))
        else:
            print('{} - Rebased'.format(branch))
Example #15
def test_pyplot_up_to_date():
    gen_script = Path(mpl.__file__).parents[2] / "tools/boilerplate.py"
    if not gen_script.exists():
        pytest.skip("boilerplate.py not found")
    orig_contents = Path(plt.__file__).read_text()
    try:
        subprocess.run([sys.executable, str(gen_script)], check=True)
        new_contents = Path(plt.__file__).read_text()

        if orig_contents != new_contents:
            diff_msg = '\n'.join(
                difflib.unified_diff(
                    orig_contents.split('\n'), new_contents.split('\n'),
                    fromfile='found pyplot.py',
                    tofile='expected pyplot.py',
                    n=0, lineterm=''))
            pytest.fail(
                "pyplot.py is not up-to-date. Please run "
                "'python tools/boilerplate.py' to update pyplot.py. "
                "This needs to be done from an environment where your "
                "current working copy is installed (e.g. 'pip install -e'd). "
                "Here is a diff of unexpected differences:\n%s" % diff_msg
            )
    finally:
        Path(plt.__file__).write_text(orig_contents)
Example #16
def tqdb_prepare():
    # Open the TQAE key and grab the install location:
    try:
        tqae_key = winreg.OpenKey(
            winreg.HKEY_LOCAL_MACHINE, LOOKUP_KEY, 0, winreg.KEY_READ)
        install = winreg.QueryValueEx(tqae_key, 'InstallLocation')[0]
    except WindowsError:
        print('Could not find installation directory for Titan Quest')
        return

    # Create the required directories if necessary
    for d in DIRECTORIES:
        Path(d).mkdir(parents=True, exist_ok=True)

    # Run the extraction commands:
    tool = Path(install, 'ArchiveTool.exe')
    for c in COMMANDS:
        input_file = Path(install, c[0])
        subprocess.run([
            # ArchiveTool.exe in the TQ Install directory
            str(tool),
            # Resource ARC file in the TQ Install directory
            str(input_file),
            # Extract flag for the ArchiveTool executable
            '-extract',
            # Output directory (local data/ dir)
            str(Path(c[1]).absolute()),
        ])
Example #17
def _send_xo_cmd(cmd_str):
    LOGGER.info('Sending xo cmd')
    subprocess.run(
        shlex.split(cmd_str),
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE,
        check=True)
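
`shlex.split` tokenizes the command string like a POSIX shell, so quoted arguments survive as single tokens:

import shlex

shlex.split('echo "hello world"')  # -> ['echo', 'hello world']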
Example #18
def install_apt():
    """Install applications that are available in default apt."""
    print('******apt install******')
    applications = ['git',  # Version control.
                    'geary',  # non-gmail email
                    'evolution',  # gmail email.
                    'quodlibet',  # Music player.
                    'gnome-tweaks',  # Finer grained desktop style editing.
                    'chrome-gnome-shell',  # App linking firefox with tweaks.
                    'syncthing',  # Point-to-point back-up manager.
                    'gnome-shell-timer',  # App for focussing productivity.
                    'transmission-gtk',  # Torrent downloader.
                    'texmaker',  # LaTeX editor (for editing resume).
                    'asunder',  # Music CD ripping.
                    'baobab',  # Disk usage analysis.
                    'nautilus-dropbox',  # Cloud storage client
                    'python3-pip',  # Python package manager

                    'gnome-mines',  # Minesweeper game.
                    'gnome-calendar',  # calendar app.
                    # 'atom',  # requires custom atom repo
                   ]
    for app in applications:
        print('******'+app+'******')
        run(['sudo', 'apt', 'install', app])
Example #19
def nginx_start():
    pid = read_file('/var/run/nginx.pid', '-1')
    cmdline = read_file('/proc/'+pid+'/cmdline')
    if cmdline.find('nginx') == -1:
        subprocess.run('nginx', shell=True)
    else:
        nginx_restart()
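
The snippet relies on a `read_file` helper that takes an optional default for unreadable files; a plausible sketch:

def read_file(path, default=''):
    # Hypothetical helper: return the file's contents, or `default` on failure.
    try:
        with open(path) as f:
            return f.read()
    except OSError:
        return default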
Example #20
def make_and_push_tag(version):
    """Tag the current commit and push that tag to origin"""
    click.confirm(
        "Push tag '%s' to origin?" % version, default=True, abort=True
    )
    run(['git', 'tag', "-s", "v%s" % version], check=True)
    run(['git', 'push', '--tags', 'origin'], check=True)
Example #21
def install_app_images():
    """Install all applications that are not available in apt by default."""

    # download_directory = '/home/tom/'
    file_extension = '.AppImage'

    # balenaEtcher (https://github.com/balena-io/etcher)
    print('******etcher******')
    # TODO compare versions before downloading.
    # Download the latest AppImage from github
    etcher_appname = 'etcher'
    etcher_url = 'https://github.com/balena-io/etcher/'
    releases_url = 'releases/latest/download/'
    reference_url = 'latest-linux.yml'
    request_url = etcher_url + releases_url + reference_url
    # download yml content and convert to a string
    yml = requests.get(request_url).content.decode()  
    for line in yml.split('\n'):
        if line[4:8] == 'url:':
            appimage_url = line[9:]
    request_url = etcher_url + releases_url + appimage_url
    output_file = etcher_appname + file_extension
    run(['wget', '-q', '--show-progress',
                    request_url,
                    '-O', output_file])
Example #22
def tikz2image(tikz_src, filetype, outfile):
    tmpdir = mkdtemp()
    olddir = os.getcwd()
    os.chdir(tmpdir)

    # Write tikz.tex file.
    with open('tikz.tex', 'w') as f:
        f.write( '\n'.join( 
            [ "\\RequirePackage{luatex85,shellesc}"
                , "\\documentclass{standalone}"
                , "\\usepackage{tikz}"
                , "\\usepackage[sfdefault]{firasans}"
                , "\\usepackage[small,euler-digits]{eulervm}"
                , "\\usepackage{pgfplots}"
                , "\\pgfplotslibrary[]{units,groupplots}"
                , "\\begin{document}" ] 
            ))
        f.write(tikz_src)
        f.write("\n\\end{document}\n")

    subprocess.run( ["latexmk", "-pdf", "-lualatex", '--shell-escape', '-silent', 'tikz.tex']
            , stdout=sys.stderr
            )
    os.chdir(olddir)
    if filetype == 'pdf':
        shutil.copyfile(tmpdir + '/tikz.pdf', outfile + '.pdf')
    else:
        subprocess.run(["convert", tmpdir + '/tikz.pdf', outfile + '.' + filetype])
    shutil.rmtree(tmpdir)
Example #23
def tearDown(self):
    cmd = [
        "mysql",
        "-u", "root",
        "-e", "DROP DATABASE charakoba_api;"
    ]
    shell.run(cmd)
Example #24
def main_check_all():
    """Check the coverage for all files individually.

    This makes sure the files have 100% coverage without running unrelated
    tests.

    This runs pytest with the used executable, so check_coverage.py should be
    called with something like ./.tox/py36/bin/python.
    """
    for test_file, src_file in PERFECT_FILES:
        if test_file is None:
            continue
        subprocess.run(
            [sys.executable, '-m', 'pytest', '--cov', 'qutebrowser',
             '--cov-report', 'xml', test_file], check=True)
        with open('coverage.xml', encoding='utf-8') as f:
            messages = check(f, [(test_file, src_file)])
        os.remove('coverage.xml')

        messages = [msg for msg in messages
                    if msg.typ == MsgType.insufficent_coverage]
        if messages:
            for msg in messages:
                print(msg.text)
            return 1
        else:
            print("Check ok!")
    return 0
Example #25
def runTask(tmpTask):
    if tmpTask == "Add Multimedia to Server":
        tmpVar = readFile("Sorter", "", "", recordFile)
    elif tmpTask == "Open Web Browser":
        webbrowser.open("www.google.com")
        '''
        with urllib.request.urlopen("http://www.espn.com") as response:
            html = response.read()
            print(html)
        '''
    elif tmpTask == "Perform System Maintenance":
        # check to see if has admin rights
        try:
            is_admin = os.getuid() == 0
        except:
            is_admin = ctypes.windll.shell32.IsUserAnAdmin()

        if is_admin == 0:
            print("Please close program and 'Run as Administrator'!")
        else:
            userResponse = input("Run system cleanup?... yes/no: ")
            if userResponse.upper() == "YES":
                subprocess.run("cleanmgr")
            userResponse = input("Run defrag of local drives?... yes/no: ")
            if userResponse.upper() == "YES":
                defrag()
            userResponse = input("Check system files?... yes/no: ")
            if userResponse.upper() == "YES":
                subprocess.run("sfc /scannow")

    elif tmpTask == "Port Scanner":
        scanports()

    else:
        print("Could not complete that task... Contact Admin!")
Example #26
def batchPdfConversion(SourceFolder,DestinationFolder):

    # ***create pdfs
    files = [file for file in os.listdir(SourceFolder) if (os.path.splitext(file)[1] == ".md" and os.path.splitext(file)[0] != "index")]



    folders = [folder for folder in os.listdir(SourceFolder) if (os.path.isdir(os.path.join(SourceFolder,folder)) and not folder.startswith("__") and not folder.startswith(".") and folder != "assets")]


    if os.path.exists(DestinationFolder):
        shutil.rmtree(DestinationFolder)

    os.makedirs(DestinationFolder)

    #outer
    if files:
        for file in files:
            print("starting conversion: " + file + " to pdf...")
            command = ['pandoc',"--variable","fontsize=14pt","--variable","documentclass=extarticle",os.path.join(SourceFolder,file),'--latex-engine=xelatex','--template=./assets/me.latex','-o',os.path.join(DestinationFolder,replaceMdByPdf(file))]
            subprocess.run(command)
            print("conversion completed: " + file + " to pdf...")

    #inner
    for folder in folders:
        os.makedirs(os.path.join(DestinationFolder,folder))
        filess = [file for file in os.listdir(os.path.join(SourceFolder,folder)) if (os.path.splitext(file)[1] == ".md" and os.path.splitext(file)[0] != "index")]

        for file in filess:
            print("starting conversion: " + file + " to pdf...")
            command = ['pandoc',"--variable","fontsize=14pt","--variable","documentclass=extarticle",os.path.join(SourceFolder,folder,file),'--latex-engine=xelatex','--template=./assets/me.latex','--highlight-style=pygments','-o',os.path.join(DestinationFolder,folder,replaceMdByPdf(file))]
            subprocess.run(command)
            print("conversion completed: " + file + " to pdf...")

    # ***combine pdfs
    #outer
    files = [file for file in os.listdir(DestinationFolder) if (os.path.splitext(file)[1] == ".pdf") ]

    if files:
        merger = PyPDF2.PdfFileMerger()

        for filename in files:
            print("combining " + filename)
            merger.append(PyPDF2.PdfFileReader(open(os.path.join(DestinationFolder,filename),'rb')))
            print("combined " + filename)

        merger.write(os.path.join(DestinationFolder,"notes.pdf"))
    #inner
    folders = [folder for folder in os.listdir(DestinationFolder) if (os.path.isdir(os.path.join(SourceFolder,folder)) and folder.startswith("__") and folder != "assets")]

    for folder in folders:
        files = [file for file in os.listdir(os.path.join(DestinationFolder,folder)) if(os.path.splitext(file)[1] == ".pdf")]
        merger = PyPDF2.PdfFileMerger()
        for filename in files:
            print("combining " + filename)
            merger.append(PyPDF2.PdfFileReader(open(os.path.join(DestinationFolder,folder,filename),'rb')))
            print("combined " + filename)
        merger.write(os.path.join(DestinationFolder,folder,sanitizeFoldername(folder) + ".pdf"))

    print("=======PDfs generated========")
Example #27
def main_check():
    """Check coverage after a test run."""
    try:
        with open('coverage.xml', encoding='utf-8') as f:
            messages = check(f, PERFECT_FILES)
    except Skipped as e:
        print(e)
        messages = []

    if messages:
        print()
        print()
        scriptutils.print_title("Coverage check failed")
        for msg in messages:
            print(msg.text)
        print()
        filters = ','.join('qutebrowser/' + msg.filename for msg in messages)
        subprocess.run([sys.executable, '-m', 'coverage', 'report',
                        '--show-missing', '--include', filters], check=True)
        print()
        print("To debug this, run 'tox -e py36-pyqt59-cov' "
              "(or py35-pyqt59-cov) locally and check htmlcov/index.html")
        print("or check https://codecov.io/github/qutebrowser/qutebrowser")
        print()

    if 'CI' in os.environ:
        print("Keeping coverage.xml on CI.")
    else:
        os.remove('coverage.xml')
    return 1 if messages else 0
Example #28
def path_source_reference(path_source_in_repo, variables):
    """
    Copy over media in repo to temp folder (this allows symlinking later)
    Some files are missing from the source set and need to be derived when this fixture is called
    """
    tempdir = tempfile.TemporaryDirectory()

    test_media_filenames = set(os.listdir(path_source_in_repo))
    for filename in test_media_filenames:
        shutil.copy2(os.path.join(path_source_in_repo, filename), os.path.join(tempdir.name, filename))

    # Derive other test media
    if 'test1.mp4' not in test_media_filenames:
        # TODO: use `variables` to acquire `cmd_ffmpeg`
        cmd = ('ffmpeg', '-f', 'image2', '-framerate', '0.1', '-i', os.path.join(path_source_in_repo, 'test1_%03d.png'), '-f', 'lavfi', '-i', 'anullsrc', '-shortest', '-c:a', 'aac', '-strict', 'experimental', '-r', '10', '-s', '640x480', os.path.join(tempdir.name, 'test1.mp4'))
        cmd_result = subprocess.run(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, timeout=20)
        assert os.path.isfile(os.path.join(tempdir.name, 'test1.mp4'))

    if 'test2.ogg' not in test_media_filenames:
        # TODO: use `variables` to acquire `cmd_sox`
        cmd = ('sox', '-n', '-r', '44100', '-c', '2', '-L', os.path.join(tempdir.name, 'test2.ogg'), 'trim', '0.0', '15.000')
        cmd_result = subprocess.run(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, timeout=20)
        assert os.path.isfile(os.path.join(tempdir.name, 'test2.ogg'))

    yield tempdir.name
    tempdir.cleanup()
Example #29
def runBLAST(results_dir, queryfile):
    my_genome = basename(results_dir)
    blast_cmd = ["blastn", "-task", "megablast", "-db", my_genome,
                 "-outfmt", "5", "-max_target_seqs", "1",
                 "-query", queryfile, "-out",
                 "./results.xml"]
    run(blast_cmd, cwd=results_dir)
Example #30
def make_upload(test=True):
    """Upload to PyPI or test.pypi"""
    if test:
        cmd = ['make', 'test-upload']
        url = 'https://test.pypi.org'
    else:
        url = 'https://pypi.org'
        cmd = ['make', 'upload']
    click.confirm(
        "Ready to upload release to %s?" % url, default=True, abort=True
    )
    success = False
    while not success:
        try:
            run(cmd, check=True)
        except CalledProcessError as exc_info:
            click.confirm(
                "Failed to upload: %s. Try again?" % str(exc_info),
                default=True,
                abort=(not test),
            )
            success = False
        else:
            success = True
            click.confirm(
                "Please check release on %s. Continue?" % url,
                default=True,
                abort=True,
            )
Example #31
def ping_ip(ip):
    result = subprocess.run(["ping", "-c", "3", "-n", ip], stdout=subprocess.DEVNULL)
    ip_is_reachable = result.returncode == 0
    return ip_is_reachable
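
A usage sketch; the address is from the TEST-NET documentation range, purely illustrative (`-c 3` sends three probes):

if ping_ip('192.0.2.1'):
    print('host is reachable')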
Example #32
def runcmd(command):
    subprocess.run(command, stdout=subprocess.PIPE, shell=True)
Example #33
    def run_bash_command(self, cmd):
        def expand_alias(match):
            if not match:
                return None
            if 'll' in match.groups():
                return 'ls -AhlgF --group-directories-first'
            if 'la' in match.groups():
                return 'ls -Agp --group-directories-first'
            if 'l' in match.groups():
                return 'ls -Fp --group-directories-first'
            if 't' in match.groups():
                return 'tree --dirsfirst -FaL 2'
            if 'g' in match.groups():
                return 'grep'
            if 'kll' in match.groups():
                return 'pkill'
            return None

        def add_colors(match):
            if not match:
                return None
            if 'tree' in match.groups():
                return 'tree -C'
            if 'ls' in match.groups():
                return 'ls --color=always'
            if 'grep' in match.groups():
                return 'grep --color=always'
            return None

        alias_patterns = [
            r'l[la]?',  # ls
            r't',  # tree
            r'g',  # grep
            r'kll',  # pkill
        ]

        for ptrn in alias_patterns:
            cmd = re.sub(
                fr'((^{ptrn})(?!\S))|((?<=[|&;]\s)(?:\s*)({ptrn})(?!\S))',
                expand_alias, cmd)

        color_patterns = [r'tree', r'ls', r'grep']

        for ptrn in color_patterns:
            without_pipe = fr'((^{ptrn})(?!\S)(?!.*\|))'
            with_pipe = fr'((?<=[|&;]\s)(?:\s*)({ptrn})(?!\S))'
            cmd = re.sub(fr'{without_pipe}|{with_pipe}', add_colors, cmd)

        proc = subprocess.run(cmd,
                              shell=True,
                              universal_newlines=True,
                              stdout=subprocess.PIPE,
                              stderr=subprocess.PIPE)

        if proc.returncode == 0:
            self.resultobj.set_result(self.mode_id,
                                      self.edit_text,
                                      'success',
                                      presentation=proc.stdout)
        else:
            self.resultobj.set_result(self.mode_id,
                                      self.edit_text,
                                      'failure',
                                      description=proc.stderr)
Example #34
def test_pytest():
    """Runs pytest on the train."""
    subprocess.run(["pytest"])
Example #35
def test_pytest_with_plugins():
    """Runs pytest with plugins on the train."""
    subprocess.run(["pytest", "--ignore=migrations", "--black", "--isort", "--flakes"])
Example #36
def start_simulation(folder_name, line_number):
	copy_wp=None
	copy_rp=None
	copy_np=None
	copy_wg=None
	print('compiling world plugin')
	world_plugin = subprocess.run("cd sources/w_swarm1/build;rm -rf *;cmake ../;make",stdin=subprocess.PIPE,stderr=subprocess.PIPE,stdout=subprocess.PIPE,shell=True)
	if world_plugin.returncode:
		print('************world plugin compilation failed')
		print(world_plugin.stderr)
	else:
		print('world plugin compilation successful')
		copy_wp = subprocess.run("cp sources/w_swarm1/build/libwp_swarm1.so ./compiled_plugins",stdin=subprocess.PIPE,stderr=subprocess.PIPE,stdout=subprocess.PIPE,shell=True)
	print()

	print('compiling robot plugin')
	robot_plugin = subprocess.run("cd sources/mp_swarm1/build;rm -rf *;cmake ../;make",stdin=subprocess.PIPE,stderr=subprocess.PIPE,stdout=subprocess.PIPE,shell=True)
	if robot_plugin.returncode:
		print('************robot plugin compilation failed')
		print(robot_plugin.stderr)
	else:
		print('robot plugin compilation successful')
		copy_rp = subprocess.run("cp sources/mp_swarm1/build/libmp_swarm1.so ./compiled_plugins",stdin=subprocess.PIPE,stderr=subprocess.PIPE,stdout=subprocess.PIPE,shell=True)
	print()

	print('compiling nest plugin')
	nest_plugin = subprocess.run("cd sources/m_nest/build;rm -rf *;cmake ../;make",stdin=subprocess.PIPE,stderr=subprocess.PIPE,stdout=subprocess.PIPE,shell=True)
	if nest_plugin.returncode:
		print('************nest plugin compilation failed')
		print(nest_plugin.stderr)
	else:
		print('nest plugin compilation successful')
		copy_np = subprocess.run("cp sources/m_nest/build/libnest_plugin.so ./compiled_plugins",stdin=subprocess.PIPE,stderr=subprocess.PIPE,stdout=subprocess.PIPE,shell=True)
	print()

	print('compiling world governor')
	world_governor = subprocess.run("cd sources/world_governor/build;rm -rf *;cmake ../;make",stdin=subprocess.PIPE,stderr=subprocess.PIPE,stdout=subprocess.PIPE,shell=True)
	if world_governor.returncode:
		print('*************world governor compilation failed')
		print(world_governor.stdout)
	else:
		print('world governor compilation successful')
		subprocess.run("rm world_governor",stdin=subprocess.PIPE,stderr=subprocess.PIPE,stdout=subprocess.PIPE,shell=True)
		copy_wg = subprocess.run("cp sources/world_governor/build/world_governor .",stdin=subprocess.PIPE,stderr=subprocess.PIPE,stdout=subprocess.PIPE,shell=True)
		#print(copy_wg.stderr)
		

	# A failed compilation leaves the corresponding copy step as None; count
	# that as a failure instead of crashing on None.returncode.
	compile_runs = [world_plugin, robot_plugin, nest_plugin, world_governor]
	copy_runs = [copy_wp, copy_rp, copy_np, copy_wg]
	all_set = sum(p.returncode for p in compile_runs) + \
			sum(c.returncode if c is not None else 1 for c in copy_runs)

	#start up gazebo if all processes are successful
	if(all_set == 0):
		load_world = subprocess.Popen("./start_simulation.sh",stdin=subprocess.PIPE,stderr=subprocess.PIPE,stdout=subprocess.PIPE,shell=True)#w_swarm1.world
		
		if load_world.returncode==None:
			load_governor = subprocess.Popen("./world_governor {} {}".format(folder_name,line_number),stdin=subprocess.PIPE,stderr=subprocess.PIPE,stdout=subprocess.PIPE,shell=True)
			if load_governor.returncode==None:
				print('''
				\n\n
				***********************************************
				All set: Simulation started and Governor loaded
				***********************************************
				\n\n''')

		while True:
			load_governor.poll()
			load_world.poll()
			#print(load_governor.returncode ,load_world.returncode)
			if load_governor.returncode != None or load_world.returncode != None:
				load_governor.kill()
				load_world.kill()
				print('''
				********************************
					Simulation Terminated
					World Governor = {}
					World Status   = {}
				Process killed because of non "None"
				value.
				********************************
				'''.format(load_governor.returncode,load_world.returncode))
				#print('world governor status: ',load_governor.returncode,'world status: ',load_world.returncode)
				break
	else:
		print(all_set)
Example #37
def enable_zen_mode(workspace_previous, width, height):
    # The opening of this function was cut off in the original listing.
    # Everything above 'resize set height' is reconstructed by analogy with
    # disable_zen_mode and the call site below; treat it as an assumption.
    return ';'.join([
        'floating enable',
        'resize set width {}'.format(width),
        'resize set height {}'.format(height),
        'move position center'
    ])

def disable_zen_mode(workspace_previous):
    return ';'.join([
        'move container to workspace {}'.format(workspace_previous),
        'workspace {}'.format(workspace_previous),
        'floating disable',
        'border normal'
    ])

def goto_zen(workspace_name):
    return 'workspace {}'.format(workspace_name)

out = subprocess.run(['i3-msg', '-t', 'get_workspaces'], capture_output=True)
workspaces = json.loads(out.stdout)

workspace_current = list(filter(lambda w: w['focused'], workspaces))[0]
workspace_zen = list(filter(lambda w: w['num'] == ZEN_NUMBER, workspaces))

if __name__ == '__main__':
    if len(sys.argv) > 1:
        if len(workspace_zen):
            msg = disable_zen_mode(workspace_zen[0]['name'].split(':')[1].strip())
    else:
        if len(workspace_zen):
            msg = goto_zen(workspace_zen[0]['name'])
        else:
            msg = enable_zen_mode(workspace_current['num'],
                                  workspace_current['rect']['width'],
                                  workspace_current['rect']['height'])
    # The listing is truncated here; the final i3-msg dispatch below is an
    # assumed completion.
    subprocess.run(['i3-msg', msg])
Example #38
def run_test(
    ctx: Context,
    specpath: str,
    streamlit_command: List[str],
    show_output: bool = False,
) -> bool:
    """Run a single e2e test.

    An e2e test consists of a Streamlit script that produces a result, and
    a Cypress test file that asserts that result is as expected.

    Parameters
    ----------
    ctx : Context
        The Context object that contains our global testing parameters.
    specpath : str
        The path of the Cypress spec file to run.
    streamlit_command : list of str
        The Streamlit command to run (passed directly to subprocess.Popen()).

    Returns
    -------
    bool
        True if the test succeeded.

    """
    SUCCESS = "SUCCESS"
    RETRY = "RETRY"
    SKIP = "SKIP"
    QUIT = "QUIT"

    result = None

    # Move existing credentials file aside, and create a new one if the
    # tests call for it.
    with move_aside_file(CREDENTIALS_FILE):
        create_credentials_toml('[general]\nemail="*****@*****.**"')

        # Loop until the test succeeds or is skipped.
        while result not in (SUCCESS, SKIP, QUIT):
            cypress_command = ["yarn", "cy:run", "--spec", specpath]
            cypress_command.extend(["--reporter", "cypress-circleci-reporter"])
            cypress_command.extend(ctx.cypress_flags)

            click.echo(
                f"{click.style('Running test:', fg='yellow', bold=True)}"
                f"\n{click.style(' '.join(streamlit_command), fg='yellow')}"
                f"\n{click.style(' '.join(cypress_command), fg='yellow')}"
            )

            # Start the streamlit command
            with AsyncSubprocess(streamlit_command, cwd=FRONTEND_DIR) as streamlit_proc:
                # Run the Cypress spec to completion.
                cypress_result = subprocess.run(
                    cypress_command,
                    cwd=FRONTEND_DIR,
                    capture_output=True,
                    text=True,
                )

                # Terminate the streamlit command and get its output
                streamlit_stdout = streamlit_proc.terminate()

            def print_output():
                click.echo(
                    f"\n\n{click.style('Streamlit output:', fg='yellow', bold=True)}"
                    f"\n{streamlit_stdout}"
                    f"\n\n{click.style('Cypress output:', fg='yellow', bold=True)}"
                    f"\n{cypress_result.stdout}"
                    f"\n"
                )

            if cypress_result.returncode == 0:
                result = SUCCESS
                click.echo(click.style("Success!\n", fg="green", bold=True))
                if show_output:
                    print_output()
            else:
                # The test failed. Print the output of the Streamlit command
                # and the Cypress command.
                click.echo(click.style("Failure!", fg="red", bold=True))
                print_output()

                if ctx.always_continue:
                    result = SKIP
                else:
                    # Prompt the user for what to do next.
                    user_input = click.prompt(
                        "[R]etry, [U]pdate snapshots, [S]kip, or [Q]uit?",
                        default="r",
                    )
                    key = user_input[0].lower()
                    if key == "s":
                        result = SKIP
                    elif key == "q":
                        result = QUIT
                    elif key == "r":
                        result = RETRY
                    elif key == "u":
                        ctx.update_snapshots = True
                        result = RETRY
                    else:
                        # Retry if key not recognized
                        result = RETRY

    if result != SUCCESS:
        ctx.any_failed = True

    if result == QUIT:
        raise QuitException()

    return result == SUCCESS
Example #39
def stage0(task_path, result_path, temp_path=None, consume_task_folder=False):
    config = worker_config()
    cgs = ControlGroupSystem()
    task = KolejkaTask(task_path)
    if not task.id:
        task.id = uuid.uuid4().hex
        logging.warning('Assigned id {} to the task'.format(task.id))
    if not task.image:
        logging.error('Task does not define system image')
        sys.exit(1)
    if not task.args:
        logging.error('Task does not define args')
        sys.exit(1)
    if not task.files.is_local:
        logging.error('Task contains non-local files')
        sys.exit(1)
    limits = KolejkaLimits()
    limits.cpus = config.cpus
    limits.memory = config.memory
    limits.swap = config.swap
    limits.pids = config.pids
    limits.storage = config.storage
    limits.image = config.image
    limits.workspace = config.workspace
    limits.time = config.time
    limits.network = config.network
    limits.gpus = config.gpus
    task.limits.update(limits)

    docker_task = 'kolejka_worker_{}'.format(task.id)

    docker_cleanup = [
        ['docker', 'kill', docker_task],
        ['docker', 'rm', docker_task],
    ]

    with tempfile.TemporaryDirectory(dir=temp_path) as jailed_path:
        #TODO jailed_path size remains unlimited?
        logging.debug('Using {} as temporary directory'.format(jailed_path))
        jailed_task_path = os.path.join(jailed_path, 'task')
        os.makedirs(jailed_task_path, exist_ok=True)
        jailed_result_path = os.path.join(jailed_path, 'result')
        os.makedirs(jailed_result_path, exist_ok=True)

        jailed = KolejkaTask(os.path.join(jailed_path, 'task'))
        jailed.load(task.dump())
        jailed.files.clear()
        volumes = list()
        check_python_volume()
        if os.path.exists(OBSERVER_SOCKET):
            volumes.append((OBSERVER_SOCKET, OBSERVER_SOCKET, 'rw'))
        else:
            logging.warning('Observer is not running.')
        volumes.append(
            (jailed_result_path, os.path.join(WORKER_DIRECTORY,
                                              'result'), 'rw'))
        for key, val in task.files.items():
            if key != TASK_SPEC:
                src_path = os.path.join(task.path, val.path)
                dst_path = os.path.join(jailed_path, 'task', key)
                os.makedirs(os.path.dirname(dst_path), exist_ok=True)
                if consume_task_folder:
                    shutil.move(src_path, dst_path)
                else:
                    shutil.copy(src_path, dst_path)
                jailed.files.add(key)
        jailed.files.add(TASK_SPEC)
        #jailed.limits = KolejkaLimits() #TODO: Task is limited by docker, no need to limit it again?
        jailed.commit()
        volumes.append((jailed.path, os.path.join(WORKER_DIRECTORY,
                                                  'task'), 'rw'))
        if consume_task_folder:
            try:
                shutil.rmtree(task_path)
            except:
                logging.warning('Failed to remove {}'.format(task_path))
                pass
        for spath in [os.path.dirname(__file__)]:
            stage1 = os.path.join(spath, 'stage1.sh')
            if os.path.isfile(stage1):
                volumes.append(
                    (stage1, os.path.join(WORKER_DIRECTORY,
                                          'stage1.sh'), 'ro'))
                break
        for spath in [os.path.dirname(__file__)]:
            stage2 = os.path.join(spath, 'stage2.py')
            if os.path.isfile(stage2):
                volumes.append(
                    (stage2, os.path.join(WORKER_DIRECTORY,
                                          'stage2.py'), 'ro'))
                break

        docker_call = ['docker', 'run']
        docker_call += ['--detach']
        docker_call += ['--name', docker_task]
        docker_call += [
            '--entrypoint',
            os.path.join(WORKER_DIRECTORY, 'stage1.sh')
        ]
        for key, val in task.environment.items():
            docker_call += ['--env', '{}={}'.format(key, val)]
        docker_call += ['--hostname', WORKER_HOSTNAME]
        docker_call += ['--init']
        if task.limits.cpus is not None:
            docker_call += [
                '--cpuset-cpus', ','.join([
                    str(c) for c in cgs.limited_cpuset(cgs.full_cpuset(
                    ), task.limits.cpus, task.limits.cpus_offset)
                ])
            ]

        if task.limits.gpus is not None and task.limits.gpus > 0:
            check_gpu_runtime_availability()
            gpus = ','.join(
                map(
                    str,
                    limited_gpuset(full_gpuset(), task.limits.gpus,
                                   task.limits.gpus_offset)))
            docker_call += [
                '--runtime=nvidia', '--shm-size=1g', '--gpus',
                f'"device={gpus}"'
            ]

        if task.limits.memory is not None:
            docker_call += ['--memory', str(task.limits.memory)]
            if task.limits.swap is not None:
                docker_call += [
                    '--memory-swap',
                    str(task.limits.memory + task.limits.swap)
                ]
        if task.limits.storage is not None:
            docker_info_run = subprocess.run(
                ['docker', 'system', 'info', '--format', '{{json .Driver}}'],
                stdout=subprocess.PIPE,
                check=True)
            storage_driver = str(
                json.loads(str(docker_info_run.stdout, 'utf-8')))
            if storage_driver == 'overlay2':
                docker_info_run = subprocess.run([
                    'docker', 'system', 'info', '--format',
                    '{{json .DriverStatus}}'
                ],
                                                 stdout=subprocess.PIPE,
                                                 check=True)
                storage_fs = dict(
                    json.loads(str(docker_info_run.stdout,
                                   'utf-8')))['Backing Filesystem']
                if storage_fs in ['xfs']:
                    storage_limit = task.limits.storage
                    docker_call += [
                        '--storage-opt', 'size=' + str(storage_limit)
                    ]
                else:
                    logging.warning(
                        "Storage limit on {} ({}) is not supported".format(
                            storage_driver, storage_fs))
            else:
                logging.warning("Storage limit on {} is not supported".format(
                    storage_driver))
        if task.limits.network is not None:
            if not task.limits.network:
                docker_call += ['--network=none']
        docker_call += ['--cap-add', 'SYS_NICE']
        if task.limits.pids is not None:
            docker_call += ['--pids-limit', str(task.limits.pids)]
        if task.limits.time is not None:
            docker_call += [
                '--stop-timeout',
                str(int(math.ceil(task.limits.time.total_seconds())))
            ]
        docker_call += [
            '--volume',
            '{}:{}:{}'.format(WORKER_PYTHON_VOLUME,
                              os.path.join(WORKER_DIRECTORY, 'python3'), 'ro')
        ]
        for v in volumes:
            docker_call += [
                '--volume', '{}:{}:{}'.format(os.path.realpath(v[0]), v[1],
                                              v[2])
            ]
        docker_call += ['--workdir', WORKER_DIRECTORY]
        docker_image = task.image
        docker_call += [docker_image]
        docker_call += ['--consume']
        if config.debug:
            docker_call += ['--debug']
        if config.verbose:
            docker_call += ['--verbose']
        docker_call += [os.path.join(WORKER_DIRECTORY, 'task')]
        docker_call += [os.path.join(WORKER_DIRECTORY, 'result')]
        logging.debug('Docker call : {}'.format(docker_call))

        pull_image = config.pull
        if not pull_image:
            docker_inspect_run = subprocess.run(
                ['docker', 'image', 'inspect', docker_image],
                stdout=subprocess.DEVNULL,
                stderr=subprocess.STDOUT)
            if docker_inspect_run.returncode != 0:
                pull_image = True
        if pull_image:
            subprocess.run(['docker', 'pull', docker_image], check=True)

        for docker_clean in docker_cleanup:
            silent_call(docker_clean)

        if os.path.exists(result_path):
            shutil.rmtree(result_path)
        os.makedirs(result_path, exist_ok=True)
        result = KolejkaResult(result_path)
        result.id = task.id
        result.limits = task.limits
        result.stdout = task.stdout
        result.stderr = task.stderr

        start_time = datetime.datetime.now()
        docker_run = subprocess.run(docker_call, stdout=subprocess.PIPE)
        cid = str(docker_run.stdout, 'utf-8').strip()
        logging.info('Started container {}'.format(cid))

        try:
            if task.limits.gpus is not None and task.limits.gpus > 0:
                result.stats.update(
                    gpu_stats(gpus=limited_gpuset(full_gpuset(
                    ), task.limits.gpus, task.limits.gpus_offset)))
        except:
            pass
        time.sleep(0.1)

        while True:
            try:
                docker_state_run = subprocess.run(
                    ['docker', 'inspect', '--format', '{{json .State}}', cid],
                    stdout=subprocess.PIPE)
                state = json.loads(str(docker_state_run.stdout, 'utf-8'))
            except:
                break
            try:
                result.stats.update(cgs.name_stats(cid))

                if task.limits.gpus is not None and task.limits.gpus > 0:
                    result.stats.update(
                        gpu_stats(gpus=limited_gpuset(full_gpuset(
                        ), task.limits.gpus, task.limits.gpus_offset)))
            except:
                pass
            time.sleep(0.1)
            if not state['Running']:
                result.result = state['ExitCode']
                try:
                    result.stats.time = dateutil.parser.parse(
                        state['FinishedAt']) - dateutil.parser.parse(
                            state['StartedAt'])
                except:
                    result.stats.time = None
                break
            if task.limits.time is not None and datetime.datetime.now(
            ) - start_time > task.limits.time + datetime.timedelta(seconds=2):
                docker_kill_run = subprocess.run(
                    ['docker', 'kill', docker_task])
        subprocess.run(['docker', 'logs', cid], stdout=subprocess.PIPE)
        try:
            summary = KolejkaResult(jailed_result_path)
            result.stats.update(summary.stats)
        except:
            pass

        stop_time = datetime.datetime.now()
        if result.stats.time is None:
            result.stats.time = stop_time - start_time
        result.stats.pids.usage = None
        result.stats.memory.usage = None
        result.stats.memory.swap = None

        for dirpath, dirnames, filenames in os.walk(jailed_result_path):
            for filename in filenames:
                abspath = os.path.join(dirpath, filename)
                realpath = os.path.realpath(abspath)
                if realpath.startswith(
                        os.path.realpath(jailed_result_path) + '/'):
                    relpath = abspath[len(jailed_result_path) + 1:]
                    if relpath != RESULT_SPEC:
                        destpath = os.path.join(result.path, relpath)
                        os.makedirs(os.path.dirname(destpath), exist_ok=True)
                        shutil.move(realpath, destpath)
                        os.chmod(destpath, 0o640)
                        result.files.add(relpath)
        result.commit()
        os.chmod(result.spec_path, 0o640)

        for docker_clean in docker_cleanup:
            silent_call(docker_clean)
Example #40
def main():
    parser = argparse.ArgumentParser(
        description=__doc__,
        formatter_class=argparse.RawDescriptionHelpFormatter)
    parser.add_argument(
        '--android-deps-dir',
        help='Path to directory containing build.gradle from chromium-dir.',
        default=_DEFAULT_ANDROID_DEPS_DIR)
    parser.add_argument(
        '--build-dir',
        help='Path to build directory (default is temporary directory).')
    parser.add_argument('--ignore-licenses',
                        help='Ignores licenses for these deps.',
                        action='store_true')
    parser.add_argument('--ignore-vulnerabilities',
                        help='Ignores vulnerabilities for these deps.',
                        action='store_true')
    parser.add_argument('-v',
                        '--verbose',
                        dest='verbose_count',
                        default=0,
                        action='count',
                        help='Verbose level (multiple times for more)')
    args = parser.parse_args()

    logging.basicConfig(
        level=logging.WARNING - 10 * args.verbose_count,
        format='%(levelname).1s %(relativeCreated)6d %(message)s')
    debug = args.verbose_count >= 2

    args.android_deps_dir = NormalizeAndroidDepsDir(args.android_deps_dir)

    abs_android_deps_dir = os.path.normpath(
        os.path.join(_CHROMIUM_SRC, args.android_deps_dir))

    # The list of files and dirs that are copied to the build directory by this
    # script. Should not include _UPDATED_ANDROID_DEPS_FILES.
    copied_paths = {
        _GLOBAL_GRADLE_BUILDSRC_PATH:
        os.path.join(args.android_deps_dir, "buildSrc"),
        _GLOBAL_GRADLE_SUPRESSIONS_PATH:
        os.path.join(args.android_deps_dir, "vulnerability_supressions.xml"),
    }

    if not args.ignore_licenses:
        copied_paths[_GLOBAL_LICENSE_SUBDIR] = _GLOBAL_LICENSE_SUBDIR

    missing_files = []
    for src_path in copied_paths.keys():
        if not os.path.exists(os.path.join(_CHROMIUM_SRC, src_path)):
            missing_files.append(src_path)
    for android_deps_file in _UPDATED_ANDROID_DEPS_FILES:
        if not os.path.exists(
                os.path.join(abs_android_deps_dir, android_deps_file)):
            missing_files.append(android_deps_file)
    if missing_files:
        raise Exception('Missing files from {}: {}'.format(
            _CHROMIUM_SRC, missing_files))

    with BuildDir(args.build_dir) as build_dir:
        build_android_deps_dir = os.path.join(build_dir, args.android_deps_dir)

        logging.info('Using build directory: %s', build_dir)
        for android_deps_file in _UPDATED_ANDROID_DEPS_FILES:
            CopyFileOrDirectory(
                os.path.join(abs_android_deps_dir, android_deps_file),
                os.path.join(build_android_deps_dir, android_deps_file))

        for path, dest in copied_paths.items():
            CopyFileOrDirectory(os.path.join(_CHROMIUM_SRC, path),
                                os.path.join(build_dir, dest))

        if not args.ignore_vulnerabilities:
            report_dst = os.path.join(abs_android_deps_dir,
                                      'vulnerability_reports')
            _CheckVulnerabilities(build_android_deps_dir, report_dst)

        logging.info('Running Gradle.')

        # Path to the gradlew script used to run build.gradle.
        abs_gradle_wrapper_path = os.path.join(_CHROMIUM_SRC, 'third_party',
                                               'gradle_wrapper', 'gradlew')

        # This gradle command generates the new DEPS and BUILD.gn files, it can
        # also handle special cases.
        # Edit BuildConfigGenerator.groovy#addSpecialTreatment for such cases.
        gradle_cmd = [
            abs_gradle_wrapper_path,
            '-b',
            os.path.join(build_android_deps_dir, _BUILD_GRADLE),
            'setupRepository',
            '--stacktrace',
        ]
        if debug:
            gradle_cmd.append('--debug')
        if args.ignore_licenses:
            gradle_cmd.append('-PskipLicenses=true')

        subprocess.run(gradle_cmd, check=True)

        build_libs_dir = os.path.join(build_android_deps_dir, _LIBS_DIR)

        logging.info('# Reformat %s.',
                     os.path.join(args.android_deps_dir, _BUILD_GN))
        gn_args = [
            'gn', 'format',
            os.path.join(build_android_deps_dir, _BUILD_GN)
        ]
        RunCommand(gn_args, print_stdout=debug)

        logging.info('# Jetify all libraries.')
        aar_files = FindInDirectory(build_libs_dir, '*.aar')
        jar_files = FindInDirectory(build_libs_dir, '*.jar')
        jetify_android_deps = build_libs_dir
        if args.android_deps_dir != _DEFAULT_ANDROID_DEPS_DIR:
            jetify_android_deps += ':' + os.path.join(
                _CHROMIUM_SRC, _DEFAULT_ANDROID_DEPS_DIR, _LIBS_DIR)
        _JetifyAll(aar_files + jar_files, jetify_android_deps)

        logging.info('# Generate Android .aar info files.')
        _CreateAarInfos(aar_files)

        if not args.ignore_licenses:
            logging.info('# Looking for nested license files.')
            for aar_file in aar_files:
                # Play Services .aar files have embedded licenses.
                with zipfile.ZipFile(aar_file) as z:
                    if _THIRD_PARTY_LICENSE_FILENAME in z.namelist():
                        aar_dirname = os.path.dirname(aar_file)
                        license_path = os.path.join(aar_dirname, 'LICENSE')
                        # Make sure to append as we don't want to lose the
                        # existing license.
                        with open(license_path, 'ab') as f:
                            f.write(z.read(_THIRD_PARTY_LICENSE_FILENAME))

        logging.info('# Compare CIPD packages.')
        existing_packages = ParseDeps(abs_android_deps_dir, _LIBS_DIR)
        build_packages = ParseDeps(build_android_deps_dir, _LIBS_DIR)

        deleted_packages = []
        updated_packages = []
        for pkg in sorted(existing_packages):
            if pkg not in build_packages:
                deleted_packages.append(pkg)
            else:
                existing_info = existing_packages[pkg]
                build_info = build_packages[pkg]
                if existing_info.tag != build_info.tag:
                    updated_packages.append(pkg)

        new_packages = sorted(set(build_packages) - set(existing_packages))

        # Generate CIPD package upload commands.
        logging.info('Querying %d CIPD packages', len(build_packages))
        cipd_commands = _GenerateCipdUploadCommands(
            args.android_deps_dir,
            (build_packages[pkg] for pkg in build_packages))

        # Copy updated DEPS and BUILD.gn from the build directory back into
        # the source tree.
        for updated_file in _UPDATED_ANDROID_DEPS_FILES:
            CopyFileOrDirectory(
                os.path.join(build_android_deps_dir, updated_file),
                os.path.join(abs_android_deps_dir, updated_file))

        # Delete obsolete or updated package directories.
        for pkg in existing_packages.values():
            pkg_path = os.path.join(abs_android_deps_dir, pkg.path)
            DeleteDirectory(pkg_path)

        # Copy new and updated packages from build directory.
        for pkg in build_packages.values():
            pkg_path = pkg.path
            dst_pkg_path = os.path.join(abs_android_deps_dir, pkg_path)
            src_pkg_path = os.path.join(build_android_deps_dir, pkg_path)
            CopyFileOrDirectory(src_pkg_path, dst_pkg_path)

        # Useful for printing timestamp.
        logging.info('All Done.')

        if new_packages:
            PrintPackageList(new_packages, 'new')
        if updated_packages:
            PrintPackageList(updated_packages, 'updated')
        if deleted_packages:
            PrintPackageList(deleted_packages, 'deleted')

        if cipd_commands:
            print('Run the following to upload CIPD packages:')
            print('-------------------- cut here ------------------------')
            print('\n'.join(cipd_commands))
            print('-------------------- cut here ------------------------')
        else:
            print('Done. All packages were already up-to-date on CIPD')
Exemplo n.º 41
0
def download_and_run_bootstrapper():
    bootstrapper_url = BOOTSTRAPPER_URL
    bootstrapper_filename = "bootstrapper.jar"

    subprocess.run(["curl", "-L", bootstrapper_url, "--output", bootstrapper_filename])
    subprocess.run(["java", "-jar", "bootstrapper.jar"])
Exemplo n.º 42
0
    fileServers:
     -
      operation: "all"
      url: "file://{work_dir}/outputs/local-site"
""".format(run_id=RUN_ID,
           work_dir=str(WORK_DIR),
           condor_pool_pegasus_home="/usr")

with open("sites.yml", "w") as f:
    f.write(sites)

# --- Transformations ----------------------------------------------------------

try:
    pegasus_config = subprocess.run(["pegasus-config", "--bin"],
                                    stdout=subprocess.PIPE,
                                    stderr=subprocess.PIPE)
except FileNotFoundError:
    print("Unable to find pegasus-config")
    raise SystemExit(1)

assert pegasus_config.returncode == 0

PEGASUS_BIN_DIR = pegasus_config.stdout.decode().strip()

transformations = """
pegasus: "5.0"
transformations:
 -
  namespace: "diamond"
  name: "analyze"
  version: "4.0"
Exemplo n.º 43
0
allgroups.sort(key=lambda x: x.lower())

print("\b\nComputing first-time contributors...", end='', file=sys.stderr)

ext_contributors = individual_authors - crdb_folk
firsttime_contributors = []
for a in individual_authors:
    # Find all aliases known for this person
    aliases = a.aliases
    # Collect the history for every alias
    hist = b''
    for al in aliases:
        spin()
        cmd = subprocess.run([
            "git", "log",
            "--author=%s <%s>" % al, options.from_commit, '-n', '1'
        ],
                             stdout=subprocess.PIPE,
                             check=True)
        hist += cmd.stdout
    if len(hist) == 0:
        # No commit from that author older than the first commit
        # selected, so that's a first-time author.
        firsttime_contributors.append(a)

print("\b\n", file=sys.stderr)
sys.stderr.flush()

#
# Presentation of results.
#
Exemplo n.º 44
0
def silent_call(*args, **kwargs):
    kwargs['stdin'] = kwargs.get('stdin', subprocess.DEVNULL)
    kwargs['stdout'] = kwargs.get('stdout', subprocess.DEVNULL)
    kwargs['stderr'] = kwargs.get('stderr', subprocess.DEVNULL)
    return subprocess.run(*args, **kwargs)
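
# Usage sketch (illustrative addition; the command shown is hypothetical):
# run a command with all three standard streams silenced and inspect only
# the exit status of the returned CompletedProcess.
result = silent_call(['git', 'fetch', '--all'])
if result.returncode != 0:
    print('git fetch failed')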
Exemplo n.º 45
0
def git_add_and_commit(msg: str, repo: Path) -> None:
    subprocess.run(["git", "add", "."], cwd=repo)
    subprocess.run(["git", "commit", "-m", msg, "--allow-empty"], cwd=repo)
Exemplo n.º 46
0
def get_indices(arr_len, parts_count):
    indices = []
    for i in range(parts_count):
        indices.append(arr_len * i // parts_count)
    indices.append(arr_len)
    return indices


def get_arr_parts(arr, indices):
    arr_parts = []
    for i in range(1, len(indices)):
        arr_parts.append(arr[indices[i - 1]:indices[i]])
    return arr_parts
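
# Worked example (illustrative addition): split 11 items into 3 parts.
# get_indices(11, 3) returns [0, 3, 7, 11], so the slices below have
# sizes 3, 4 and 4.
parts = get_arr_parts(list(range(11)), get_indices(11, 3))
# parts == [[0, 1, 2], [3, 4, 5, 6], [7, 8, 9, 10]]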


if __name__ == '__main__':
    current_time = time.time()
    readable_current_time = time.ctime(current_time)
    subprocess.run(["mkdir", "./logs/{}".format(current_time)])
    logging.basicConfig(level=logging.INFO)
    loggers = []
    for name in range(processes_number):
        logger = logging.getLogger(str(name))
        log_name = "logs/{}/{}-{}.log".format(current_time,
                                              readable_current_time, name)
        open(log_name, 'w').close()
        fh = logging.FileHandler(log_name)
        logger.addHandler(fh)
        loggers.append(logger)

    with open("warcs_list.txt", 'r') as f:
        names = f.readlines()
        indices = get_indices(len(names), processes_number)
        names_parts = get_arr_parts(names, indices)
Exemplo n.º 47
0
#Labeling the coordinates
tri_coords = np.vstack((A,B,C,O,I)).T
plt.scatter(tri_coords[0,:], tri_coords[1,:])
vert_labels = ['A','B','C','O','I']
for i, txt in enumerate(vert_labels):
    plt.annotate(txt, # this is the text
                 (tri_coords[0,i], tri_coords[1,i]), # this is the point to label
                 textcoords="offset points", # how to position the text
                 xytext=(0,10), # distance from text to points (x,y)
                 ha='center') # horizontal alignment can be left, right or center

plt.xlabel('$x$')
plt.ylabel('$y$')
plt.legend(loc='best')
plt.grid() # minor
plt.axis('equal')

#if using termux
plt.savefig('./figs/tri_sss.pdf')
plt.savefig('./figs/tri_sss.eps')
subprocess.run(shlex.split("termux-open ./figs/tri_sss.pdf"))
#else
#plt.show()

Exemplo n.º 48
0
def start_subprocess_print(li, sleepbefore=2, cwd=None):
    print("Will execute command after {}s: \n\t{}".format(
        sleepbefore, " ".join(li)))
    time.sleep(sleepbefore)
    subprocess.run(li, cwd=cwd)
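
# Usage sketch (illustrative addition; the command is hypothetical): announce
# the command, wait one second, then run it in /tmp.
start_subprocess_print(["ls", "-la"], sleepbefore=1, cwd="/tmp")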
Exemplo n.º 49
0
#!/usr/bin/env python3
import subprocess

# subprocess.call(['ls', '-a'])  # returns the command's exit status
# subprocess.run(['ls', '-a'])  # returns a CompletedProcess object

# Capture the output
res = subprocess.run(['ls', '-a'], stdout=subprocess.PIPE)
# sys.stdout.buffer.write(res.stdout)
print(res.stdout)
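
# A closely related sketch (an addition; assumes Python 3.7+): text=True
# decodes stdout to str, so no manual .decode() is needed.
res = subprocess.run(['ls', '-a'], stdout=subprocess.PIPE, text=True)
print(res.stdout)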
Exemplo n.º 50
0
def git_create_repository(repo: Path) -> None:
    subprocess.run(["git", "init"], cwd=repo)
    git_add_and_commit(msg="Initial commit", repo=repo)
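
# Usage sketch (illustrative addition): initialize a fresh repository with an
# initial commit inside a temporary directory. The tempfile/pathlib imports
# are assumed for this example only.
import tempfile
from pathlib import Path

with tempfile.TemporaryDirectory() as tmp:
    git_create_repository(repo=Path(tmp))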
Exemplo n.º 51
0
def start():
    parser = argparse.ArgumentParser(
        formatter_class=argparse.RawDescriptionHelpFormatter,
        description='A tool for generating QEMU command lines.')
    parser.add_argument('--qemu', default=DEFAULT_QEMU, help=OPT_QEMU_HELP)
    parser.add_argument('-s',
                        '--smp',
                        type=int,
                        metavar='N',
                        help=OPT_SMP_HELP)
    parser.add_argument('-A',
                        '--no-accel',
                        action='store_true',
                        help='disable acceleration such as KVM')
    parser.add_argument('-m',
                        '--memory',
                        type=int,
                        default=DEFAULT_MEM_SIZE,
                        metavar='M',
                        help=OPT_MEMORY_HELP)
    parser.add_argument('-d',
                        '--drive',
                        nargs='+',
                        action='append',
                        default=[],
                        metavar=('FILE', 'OPT'),
                        help=OPT_DRIVE_HELP)
    parser.add_argument('-u',
                        '--net-user',
                        action='store_true',
                        help=OPT_NET_USER_HELP)
    parser.add_argument('-f',
                        '--host-forward',
                        action='append',
                        default=[],
                        metavar='PORTS',
                        help=OPT_HOST_FWD_HELP)
    parser.add_argument('-t',
                        '--tap',
                        nargs='?',
                        metavar=('MAC_FILE'),
                        const=NO_MAC_FILE,
                        help=OPT_TAP_HELP)
    parser.add_argument('-B',
                        '--bridges',
                        action='append',
                        metavar='BR',
                        help=OPT_BRIDGES_HELP)
    # This default path is for Ubuntu
    parser.add_argument('-H',
                        '--bridge-helper',
                        default='/usr/lib/qemu/qemu-bridge-helper',
                        metavar='HELPER',
                        help=OPT_BRIDGE_HELPER_HELP)
    parser.add_argument('-c',
                        '--cdrom',
                        metavar='FILE',
                        help='add a CD-ROM drive with an ISO image file')
    parser.add_argument('-a',
                        '--sound',
                        type=str,
                        nargs='?',
                        default=None,
                        const=DEFAULT_SOUND_DEV,
                        metavar='DEVICE',
                        help=OPT_SOUND_HELP)
    parser.add_argument('-k',
                        '--kernel',
                        nargs='+',
                        metavar=('vmlinuz', 'cmdline'),
                        help='boot Linux kernel directly')
    parser.add_argument('-i', '--initrd', metavar='FILE', help=OPT_INITRD_HELP)
    parser.add_argument('-n',
                        '--nographic',
                        action='store_true',
                        help=OPT_NOGRAPHIC_HELP)
    parser.add_argument('-b', '--boot', metavar='DRIVE', help=OPT_BOOT_HELP)
    parser.add_argument('--menu', action='store_true', help=OPT_MENU_HELP)
    parser.add_argument('-M',
                        '--monitor',
                        nargs='?',
                        metavar='DEV',
                        const=DEFAULT_MONITOR_DEV,
                        help=OPT_MONITOR_HELP)
    parser.add_argument('-g',
                        '--gdb',
                        metavar='PORT',
                        nargs='?',
                        const=DEFAULT_GDB_PORT,
                        help=OPT_GDB_HELP)
    parser.add_argument('-S',
                        '--serial',
                        choices=['mon:stdio', 'pts'],
                        help='add a serial port')
    parser.add_argument('-vnc', type=int, metavar='N', help=OPT_VNC_HELP)
    parser.add_argument('-W',
                        '--wait',
                        action='store_true',
                        help=OPT_WAIT_HELP)
    parser.add_argument('-usb',
                        nargs=2,
                        type=int,
                        metavar=('BUS', 'DEV'),
                        help=OPT_USB_HELP)
    parser.add_argument('--sudo', action='store_true', help='execute via sudo')

    parser.add_argument('-e',
                        '--execute',
                        action='store_true',
                        help=OPT_EXEC_HELP)

    parser.add_argument('additional', nargs='*', help=OPT_ADDITIONAL_HELP)

    # This is for internal use
    parser.add_argument('--subprocess',
                        action='store_true',
                        help=argparse.SUPPRESS)

    args = parser.parse_args()
    if args.sudo and not args.subprocess:
        cmd = ['sudo']
        cmd.extend(sys.argv)
        cmd.append('--subprocess')
    else:
        cmd = generate(args).get_arguments()

    print(cmd)
    if args.execute:
        for mac in ctx.mac_list:
            mac.save_if_needed()
        subprocess.run(cmd)
Exemplo n.º 52
0
def cmd(command):
    subprocess.run(command, shell=True)
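
# A safer sketch (an addition, not the original): for simple commands without
# pipes or redirection, shlex.split avoids shell=True and the shell-injection
# risk of interpolating untrusted input into a command string.
import shlex

def cmd_no_shell(command):
    subprocess.run(shlex.split(command))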
Exemplo n.º 53
0
    if not file.endswith(".json"):
        continue

    print("---")
    print(file + ":")

    shell = True
    output_file = output_path / (file + ".yaml")
    expected_result_file = tests_path / (file[0:-4] + "yaml")
    command = compiler_command.split(" ")
    command.extend([str(tests_path / file), "--output", str(output_file)])

    if platform.system() == "Linux":
        shell = False

    result = subprocess.run(command, shell=shell, cwd=os.getcwd())

    if result.returncode != 0:
        print("Error while running compiler")
        continue

    with open(str(expected_result_file),
              "r") as file_expect, open(str(output_file), "r") as file_result:
        read_exp = read_file(file_expect)
        read_res = read_file(file_result)

        while True:
            line_exp = next(read_exp, None)
            line_res = next(read_res, None)

            if line_exp is None or line_res is None:
Exemplo n.º 54
0
#!/usr/bin/env python3

import json
import os
import subprocess

gcloud_completed_process = subprocess.run(
    ['gcloud', 'compute', 'instances', 'list', '--format', 'json'],
    check=True,
    stdout=subprocess.PIPE
)

gce_instances = json.loads(gcloud_completed_process.stdout.decode('utf-8'))

inventory = {
    '_meta': {
        'hostvars': {}
    },
    'all': {
        'hosts': [],
        'children': [
            'ungrouped'
        ]
    },
    'ungrouped': []
}

for instance in gce_instances:
    # pull hostvars out of instance
    hostvars = {}
    hostvars['zone'] = os.path.basename(instance['zone'])
Exemplo n.º 55
0
def get_extensions():
    this_dir = os.path.dirname(os.path.abspath(__file__))
    extensions_dir = os.path.join(this_dir, "torchvision", "csrc")

    main_file = glob.glob(os.path.join(extensions_dir, "*.cpp")) + glob.glob(
        os.path.join(extensions_dir, "ops", "*.cpp")
    )
    source_cpu = (
        glob.glob(os.path.join(extensions_dir, "ops", "autograd", "*.cpp"))
        + glob.glob(os.path.join(extensions_dir, "ops", "cpu", "*.cpp"))
        + glob.glob(os.path.join(extensions_dir, "ops", "quantized", "cpu", "*.cpp"))
    )

    is_rocm_pytorch = False

    if torch.__version__ >= "1.5":
        from torch.utils.cpp_extension import ROCM_HOME

        is_rocm_pytorch = (torch.version.hip is not None) and (ROCM_HOME is not None)

    if is_rocm_pytorch:
        from torch.utils.hipify import hipify_python

        hipify_python.hipify(
            project_directory=this_dir,
            output_directory=this_dir,
            includes="torchvision/csrc/ops/cuda/*",
            show_detailed=True,
            is_pytorch_extension=True,
        )
        source_cuda = glob.glob(os.path.join(extensions_dir, "ops", "hip", "*.hip"))
        # Copy over additional files
        for file in glob.glob(r"torchvision/csrc/ops/cuda/*.h"):
            shutil.copy(file, "torchvision/csrc/ops/hip")
    else:
        source_cuda = glob.glob(os.path.join(extensions_dir, "ops", "cuda", "*.cu"))

    source_cuda += glob.glob(os.path.join(extensions_dir, "ops", "autocast", "*.cpp"))

    sources = main_file + source_cpu
    extension = CppExtension

    compile_cpp_tests = os.getenv("WITH_CPP_MODELS_TEST", "0") == "1"
    if compile_cpp_tests:
        test_dir = os.path.join(this_dir, "test")
        models_dir = os.path.join(this_dir, "torchvision", "csrc", "models")
        test_file = glob.glob(os.path.join(test_dir, "*.cpp"))
        source_models = glob.glob(os.path.join(models_dir, "*.cpp"))

        test_file = [os.path.join(test_dir, s) for s in test_file]
        source_models = [os.path.join(models_dir, s) for s in source_models]
        tests = test_file + source_models
        tests_include_dirs = [test_dir, models_dir]

    define_macros = []

    extra_compile_args = {"cxx": []}
    if (torch.cuda.is_available() and ((CUDA_HOME is not None) or is_rocm_pytorch)) or os.getenv(
        "FORCE_CUDA", "0"
    ) == "1":
        extension = CUDAExtension
        sources += source_cuda
        if not is_rocm_pytorch:
            define_macros += [("WITH_CUDA", None)]
            nvcc_flags = os.getenv("NVCC_FLAGS", "")
            if nvcc_flags == "":
                nvcc_flags = []
            else:
                nvcc_flags = nvcc_flags.split(" ")
        else:
            define_macros += [("WITH_HIP", None)]
            nvcc_flags = []
        extra_compile_args["nvcc"] = nvcc_flags

    if sys.platform == "win32":
        define_macros += [("torchvision_EXPORTS", None)]
        define_macros += [("USE_PYTHON", None)]
        extra_compile_args["cxx"].append("/MP")

    debug_mode = os.getenv("DEBUG", "0") == "1"
    if debug_mode:
        print("Compile in debug mode")
        extra_compile_args["cxx"].append("-g")
        extra_compile_args["cxx"].append("-O0")
        if "nvcc" in extra_compile_args:
            # we have to remove "-OX" and "-g" flag if exists and append
            nvcc_flags = extra_compile_args["nvcc"]
            extra_compile_args["nvcc"] = [f for f in nvcc_flags if not ("-O" in f or "-g" in f)]
            extra_compile_args["nvcc"].append("-O0")
            extra_compile_args["nvcc"].append("-g")

    sources = [os.path.join(extensions_dir, s) for s in sources]

    include_dirs = [extensions_dir]

    ext_modules = [
        extension(
            "torchvision._C",
            sorted(sources),
            include_dirs=include_dirs,
            define_macros=define_macros,
            extra_compile_args=extra_compile_args,
        )
    ]
    if compile_cpp_tests:
        ext_modules.append(
            extension(
                "torchvision._C_tests",
                tests,
                include_dirs=tests_include_dirs,
                define_macros=define_macros,
                extra_compile_args=extra_compile_args,
            )
        )

    # ------------------- Torchvision extra extensions ------------------------
    vision_include = os.environ.get("TORCHVISION_INCLUDE", None)
    vision_library = os.environ.get("TORCHVISION_LIBRARY", None)
    vision_include = vision_include.split(os.pathsep) if vision_include is not None else []
    vision_library = vision_library.split(os.pathsep) if vision_library is not None else []
    include_dirs += vision_include
    library_dirs = vision_library

    # Image reading extension
    image_macros = []
    image_include = [extensions_dir]
    image_library = []
    image_link_flags = []

    if sys.platform == "win32":
        image_macros += [("USE_PYTHON", None)]

    # Locating libPNG
    libpng = distutils.spawn.find_executable("libpng-config")
    pngfix = distutils.spawn.find_executable("pngfix")
    png_found = libpng is not None or pngfix is not None
    print(f"PNG found: {png_found}")
    if png_found:
        if libpng is not None:
            # Linux / Mac
            png_version = subprocess.run([libpng, "--version"], stdout=subprocess.PIPE)
            png_version = png_version.stdout.strip().decode("utf-8")
            print(f"libpng version: {png_version}")
            png_version = parse_version(png_version)
            if png_version >= parse_version("1.6.0"):
                print("Building torchvision with PNG image support")
                png_lib = subprocess.run([libpng, "--libdir"], stdout=subprocess.PIPE)
                png_lib = png_lib.stdout.strip().decode("utf-8")
                if "disabled" not in png_lib:
                    image_library += [png_lib]
                png_include = subprocess.run([libpng, "--I_opts"], stdout=subprocess.PIPE)
                png_include = png_include.stdout.strip().decode("utf-8")
                _, png_include = png_include.split("-I")
                print(f"libpng include path: {png_include}")
                image_include += [png_include]
                image_link_flags.append("png")
            else:
                print("libpng installed version is less than 1.6.0, disabling PNG support")
                png_found = False
        else:
            # Windows
            png_lib = os.path.join(os.path.dirname(os.path.dirname(pngfix)), "lib")
            png_include = os.path.join(os.path.dirname(os.path.dirname(pngfix)), "include", "libpng16")
            image_library += [png_lib]
            image_include += [png_include]
            image_link_flags.append("libpng")

    # Locating libjpeg
    (jpeg_found, jpeg_conda, jpeg_include, jpeg_lib) = find_library("jpeglib", vision_include)

    print(f"JPEG found: {jpeg_found}")
    image_macros += [("PNG_FOUND", str(int(png_found)))]
    image_macros += [("JPEG_FOUND", str(int(jpeg_found)))]
    if jpeg_found:
        print("Building torchvision with JPEG image support")
        image_link_flags.append("jpeg")
        if jpeg_conda:
            image_library += [jpeg_lib]
            image_include += [jpeg_include]

    # Locating nvjpeg
    # Should be included in CUDA_HOME for CUDA >= 10.1, which is the minimum version we have in the CI
    nvjpeg_found = (
        extension is CUDAExtension
        and CUDA_HOME is not None
        and os.path.exists(os.path.join(CUDA_HOME, "include", "nvjpeg.h"))
    )

    print(f"NVJPEG found: {nvjpeg_found}")
    image_macros += [("NVJPEG_FOUND", str(int(nvjpeg_found)))]
    if nvjpeg_found:
        print("Building torchvision with NVJPEG image support")
        image_link_flags.append("nvjpeg")

    image_path = os.path.join(extensions_dir, "io", "image")
    image_src = (
        glob.glob(os.path.join(image_path, "*.cpp"))
        + glob.glob(os.path.join(image_path, "cpu", "*.cpp"))
        + glob.glob(os.path.join(image_path, "cuda", "*.cpp"))
    )

    if png_found or jpeg_found:
        ext_modules.append(
            extension(
                "torchvision.image",
                image_src,
                include_dirs=image_include + include_dirs + [image_path],
                library_dirs=image_library + library_dirs,
                define_macros=image_macros,
                libraries=image_link_flags,
                extra_compile_args=extra_compile_args,
            )
        )

    ffmpeg_exe = distutils.spawn.find_executable("ffmpeg")
    has_ffmpeg = ffmpeg_exe is not None
    # FIXME: Building torchvision with ffmpeg on MacOS or with Python 3.9
    # FIXME: causes crash. See the following GitHub issues for more details.
    # FIXME: https://github.com/pytorch/pytorch/issues/65000
    # FIXME: https://github.com/pytorch/vision/issues/3367
    if sys.platform != "linux" or (sys.version_info.major == 3 and sys.version_info.minor == 9):
        has_ffmpeg = False
    if has_ffmpeg:
        try:
            # This is to check if ffmpeg is installed properly.
            subprocess.check_output(["ffmpeg", "-version"])
        except subprocess.CalledProcessError:
            print("Error fetching ffmpeg version, ignoring ffmpeg.")
            has_ffmpeg = False

    print(f"FFmpeg found: {has_ffmpeg}")

    if has_ffmpeg:
        ffmpeg_libraries = {"libavcodec", "libavformat", "libavutil", "libswresample", "libswscale"}

        ffmpeg_bin = os.path.dirname(ffmpeg_exe)
        ffmpeg_root = os.path.dirname(ffmpeg_bin)
        ffmpeg_include_dir = os.path.join(ffmpeg_root, "include")
        ffmpeg_library_dir = os.path.join(ffmpeg_root, "lib")

        gcc = os.environ.get("CC", distutils.spawn.find_executable("gcc"))
        platform_tag = subprocess.run([gcc, "-print-multiarch"], stdout=subprocess.PIPE)
        platform_tag = platform_tag.stdout.strip().decode("utf-8")

        if platform_tag:
            # Most probably a Debian-based distribution
            ffmpeg_include_dir = [ffmpeg_include_dir, os.path.join(ffmpeg_include_dir, platform_tag)]
            ffmpeg_library_dir = [ffmpeg_library_dir, os.path.join(ffmpeg_library_dir, platform_tag)]
        else:
            ffmpeg_include_dir = [ffmpeg_include_dir]
            ffmpeg_library_dir = [ffmpeg_library_dir]

        has_ffmpeg = True
        for library in ffmpeg_libraries:
            library_found = False
            for search_path in ffmpeg_include_dir + include_dirs:
                full_path = os.path.join(search_path, library, "*.h")
                library_found |= len(glob.glob(full_path)) > 0

            if not library_found:
                print(f"{library} header files were not found, disabling ffmpeg support")
                has_ffmpeg = False

    if has_ffmpeg:
        print(f"ffmpeg include path: {ffmpeg_include_dir}")
        print(f"ffmpeg library_dir: {ffmpeg_library_dir}")

        # TorchVision base decoder + video reader
        video_reader_src_dir = os.path.join(this_dir, "torchvision", "csrc", "io", "video_reader")
        video_reader_src = glob.glob(os.path.join(video_reader_src_dir, "*.cpp"))
        base_decoder_src_dir = os.path.join(this_dir, "torchvision", "csrc", "io", "decoder")
        base_decoder_src = glob.glob(os.path.join(base_decoder_src_dir, "*.cpp"))
        # Torchvision video API
        videoapi_src_dir = os.path.join(this_dir, "torchvision", "csrc", "io", "video")
        videoapi_src = glob.glob(os.path.join(videoapi_src_dir, "*.cpp"))
        # exclude tests
        base_decoder_src = [x for x in base_decoder_src if "_test.cpp" not in x]

        combined_src = video_reader_src + base_decoder_src + videoapi_src

        ext_modules.append(
            CppExtension(
                "torchvision.video_reader",
                combined_src,
                include_dirs=[
                    base_decoder_src_dir,
                    video_reader_src_dir,
                    videoapi_src_dir,
                    extensions_dir,
                    *ffmpeg_include_dir,
                    *include_dirs,
                ],
                library_dirs=ffmpeg_library_dir + library_dirs,
                libraries=[
                    "avcodec",
                    "avformat",
                    "avutil",
                    "swresample",
                    "swscale",
                ],
                extra_compile_args=["-std=c++14"] if os.name != "nt" else ["/std:c++14", "/MP"],
                extra_link_args=["-std=c++14" if os.name != "nt" else "/std:c++14"],
            )
        )

    # Locating video codec
    # CUDA_HOME should be set to the cuda root directory.
    # TORCHVISION_INCLUDE and TORCHVISION_LIBRARY should include the location to
    # video codec header files and libraries respectively.
    video_codec_found = (
        extension is CUDAExtension
        and CUDA_HOME is not None
        and any([os.path.exists(os.path.join(folder, "cuviddec.h")) for folder in vision_include])
        and any([os.path.exists(os.path.join(folder, "nvcuvid.h")) for folder in vision_include])
        and any([os.path.exists(os.path.join(folder, "libnvcuvid.so")) for folder in library_dirs])
    )

    print(f"video codec found: {video_codec_found}")

    if (
        video_codec_found
        and has_ffmpeg
        and any([os.path.exists(os.path.join(folder, "libavcodec", "bsf.h")) for folder in ffmpeg_include_dir])
    ):
        gpu_decoder_path = os.path.join(extensions_dir, "io", "decoder", "gpu")
        gpu_decoder_src = glob.glob(os.path.join(gpu_decoder_path, "*.cpp"))
        cuda_libs = os.path.join(CUDA_HOME, "lib64")
        cuda_inc = os.path.join(CUDA_HOME, "include")

        ext_modules.append(
            extension(
                "torchvision.Decoder",
                gpu_decoder_src,
                include_dirs=include_dirs + [gpu_decoder_path] + [cuda_inc] + ffmpeg_include_dir,
                library_dirs=ffmpeg_library_dir + library_dirs + [cuda_libs],
                libraries=[
                    "avcodec",
                    "avformat",
                    "avutil",
                    "swresample",
                    "swscale",
                    "nvcuvid",
                    "cuda",
                    "cudart",
                    "z",
                    "pthread",
                    "dl",
                    "nppicc",
                ],
                extra_compile_args=extra_compile_args,
            )
        )
    else:
        print(
            "The installed version of ffmpeg is missing the header file 'bsf.h' which is "
            "required for GPU video decoding. Please install the latest ffmpeg from conda-forge channel:"
            " `conda install -c conda-forge ffmpeg`."
        )

    return ext_modules
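
# Usage sketch (an addition; assumes a torchvision-style setup.py): the
# extension list is typically handed to setuptools together with PyTorch's
# BuildExtension command class, which injects the right compiler flags.
from setuptools import setup
from torch.utils.cpp_extension import BuildExtension

setup(
    name="torchvision",
    ext_modules=get_extensions(),
    cmdclass={"build_ext": BuildExtension},
)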
Exemplo n.º 56
0
def install_package(package):
    package_type = package.split('-')[0]
    urlstr = 'https://gitlab.com/api/v4/projects/19185895/packages/pypi/simple/'
    directory = str(pathlib.Path().absolute())+'/devisor/'+TYPE_DICT[package_type]+'/'
    return subprocess.run([sys.executable, "-m", "pip", "install", package, '--no-index', '--find-links', urlstr+package, '-t', directory],
            capture_output=True)
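
# Usage sketch (illustrative addition; the package name is hypothetical and
# its prefix must exist in TYPE_DICT): capture_output=True stores both
# streams on the result, so stderr can be inspected on failure.
result = install_package('sensor-bme280')
if result.returncode != 0:
    print(result.stderr.decode())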
Exemplo n.º 57
0
highlight_language = 'python3'
pygments_style = 'borland'
sys.path.insert(0, os.path.abspath('_extensions'))


# 0 - sequence number of image in whole document
# 1 - sequence number of image in header level 1 (only if :numbered: option is present at toctree directive)
# 2 - sequence number of image in header level 2
#       will use x.1, x.2, … if located directly under a header level 1,
#       will use 1, 2, … if at the document level
numfig_secnum_depth = 0
numfig = True
smartquotes = False

project_slug = re.sub(r'[\W]+', '', project)
sha1 = subprocess.run('git log -1 --format="%h"', stdout=subprocess.PIPE, shell=True, encoding='utf-8').stdout.strip()
year = date.today().year
today = date.today().strftime('%Y-%m-%d')

version = f'#{sha1}, {today}'
release = f'#{sha1}, {today}'
copyright = f'{year}, {author} <{email}>'

html_show_sphinx = False
html_use_smartypants = False
html_search_language = language
html_add_permalinks = ""
html_theme_path = ['_themes']
html_secnumber_suffix = '. '
html_title = project
Exemplo n.º 58
0
    def convert(self):
        """
        This method converts the SVG files found to EMF files.

        The filenames of the SVG files would be kept.

        Overwrites files with the same filenames without notice.

        Conversion is achieved via Inkscape. 
        
        The command is in the form of:

        .. code::

            inkscape foo.svg --export-emf=bar.emf

        Parameter
        ----------
        self
        
        Return
        -------
        bool
            Returns True if conversion successful.
            Returns False if conversion unsuccessful or on exception.
        
        Example
        --------
        .. code:: python
        
            self.btn_go.clicked.connect(self.convert)
        """

        print(gsyIO.date_time_now() + 'Converting...')

        # progress bar control
        int_count = 0

        dbl_progress = 0

        self.progressBar.setValue(dbl_progress)

        # check if all folders exist
        bool_dir_exist = self.check_dir_exist()

        try:

            # if not all folders exist, prompt error message and return False
            if bool_dir_exist == False:

                gsyIO.prompt_msg(str_title='Folder not found',
                                str_msg='At least one folder not found',
                                str_type='err')

                print(gsyIO.date_time_now() + 'At least one folder not found')
                print(gsyIO.date_time_now() + 'Conversion failed')

                return False

            else:
                
                # first part of the shell command
                str_inkscape = '"' + self.str_inkscape_dir + os.sep + 'inkscape' + '"'

                # search for SVG files
                list_svg_file_path = self.search_svg()

                # if the list is empty
                if not list_svg_file_path:

                    gsyIO.prompt_msg(str_title='SVG not found',
                                    str_msg='No SVG file found',
                                    str_type='err')

                    print(gsyIO.date_time_now() + 'No SVG file found')
                    print(gsyIO.date_time_now() + 'Conversion failed')

                    return False

                else:
                    
                    # save user settings
                    self.save_setting()

                    # get the total number of SVG files in the list
                    int_svg_file_count = len(list_svg_file_path)

                    # For-Loop through the SVG files and convert to EMF
                    for item in list_svg_file_path:

                        str_svg_file_path = item

                        # reverse find first path separator
                        index = str_svg_file_path.rfind(os.sep)

                        # get the filename (only) of the SVG file
                        str_svg_filename = str_svg_file_path[(index + 1):]

                        # find the "." of the SVG extension
                        index = str_svg_filename.rfind('.')

                        # replace the "svg" for "emf"
                        str_emf_filename = str_svg_filename[:(index + 1)] + 'emf'

                        # form the full path for the EMF file
                        str_emf_file_path = os.path.join(self.str_emf_dir, str_emf_filename)

                        # form the shell command
                        str_cmd = (str_inkscape + ' ' 
                                + '"' + str_svg_file_path + '"' + ' ' 
                                + '"' + CONST_EXPT_EMF + str_emf_file_path + '"')

                        # run the shell command, timeout is 10 minutes
                        obj = subprocess.run(str_cmd, shell=True, timeout=600)

                        # progress bar control
                        int_count += 1

                        dbl_progress = float(int_count) / float(int_svg_file_count) * 100.0

                        str_info = ('Converting ' + str(int_count) + ' of ' + str(int_svg_file_count)
                                    + ', ' + '{:.2f}'.format(dbl_progress) + r'%')

                        print(str_info)

                        self.progressBar.setValue(dbl_progress)

                    # open EMF folder on end
                    if self.checkBox.isChecked() == True:

                        self.open_emf_folder()

                    else:

                        pass

                    print(gsyIO.date_time_now() + 'Conversion complete')

                    return True

        except:

            print(gsyIO.date_time_now() + 'Conversion failed')

            return False
Exemplo n.º 59
0
    def c_repr(self, name, resources):
        typ = self.yaml_obj['type']
        if typ in ['checkers', 'check']:
            buf = """    Color {0}_color_0_raw = color({2:.10f}, {3:.10f}, {4:.10f});
    Color {0}_color_1_raw = color({5:.10f}, {6:.10f}, {7:.10f});
    Color {0}_color_0;
    Color {0}_color_1;
    color_space_fn({0}_color_0_raw, {0}_color_0);
    color_space_fn({0}_color_1_raw, {0}_color_1);
    uv_check_pattern({0}_color_0, {0}_color_1, {8}, {9}, {0});

""".format(name,
           "",
           self.yaml_obj['colors'][0][0],
           self.yaml_obj['colors'][0][1],
           self.yaml_obj['colors'][0][2],
           self.yaml_obj['colors'][1][0],
           self.yaml_obj['colors'][1][1],
           self.yaml_obj['colors'][1][2],
           self.yaml_obj['width'],
           self.yaml_obj['height'])

        elif typ in ['align_check', 'align-check']:
            if typ == 'align-check':
                typ = 'align_check'
            colors = self.yaml_obj['colors'] # dict

            buf = """    Color {0}_color_0_raw = color({2:.10f}, {3:.10f}, {4:.10f});
    Color {0}_color_1_raw = color({5:.10f}, {6:.10f}, {7:.10f});
    Color {0}_color_2_raw = color({8:.10f}, {9:.10f}, {10:.10f});
    Color {0}_color_3_raw = color({11:.10f}, {12:.10f}, {13:.10f});
    Color {0}_color_4_raw = color({14:.10f}, {15:.10f}, {16:.10f});
    Color {0}_color_0;
    Color {0}_color_1;
    Color {0}_color_2;
    Color {0}_color_3;
    Color {0}_color_4;
    color_space_fn({0}_color_0_raw, {0}_color_0);
    color_space_fn({0}_color_1_raw, {0}_color_1);
    color_space_fn({0}_color_2_raw, {0}_color_2);
    color_space_fn({0}_color_3_raw, {0}_color_3);
    color_space_fn({0}_color_4_raw, {0}_color_4);
    uv_align_check_pattern({0}_color_0, {0}_color_1, {0}_color_2, {0}_color_3, {0}_color_4, {0});

""".format(name,
           "",
           self.yaml_obj['colors']['main'][0],
           self.yaml_obj['colors']['main'][1],
           self.yaml_obj['colors']['main'][2],
           self.yaml_obj['colors']['ul'][0],
           self.yaml_obj['colors']['ul'][1],
           self.yaml_obj['colors']['ul'][2],
           self.yaml_obj['colors']['ur'][0],
           self.yaml_obj['colors']['ur'][1],
           self.yaml_obj['colors']['ur'][2],
           self.yaml_obj['colors']['bl'][0],
           self.yaml_obj['colors']['bl'][1],
           self.yaml_obj['colors']['bl'][2],
           self.yaml_obj['colors']['br'][0],
           self.yaml_obj['colors']['br'][1],
           self.yaml_obj['colors']['br'][2])
        elif typ == 'image':
            file_path = self.yaml_obj['file']
            png_file_path = file_path[:-3] + 'png'
            if file_path[-3:] != 'png':
                # check for existence of the png version of the file
                if not (os.path.exists(png_file_path) and os.path.isfile(png_file_path)):
                    # if it does not exist, create it with 'convert'
                    subprocess.run(['convert', file_path, '-compress', 'none', '-quality', '95', png_file_path])

            if file_path not in resources:
                uv_pattern_name = 'pattern_{}'.format(file_path.replace('/', '_').replace('-','_').replace('.','_'))
                color_space_fn_name = 'rgb_to_rgb'
                if name.find('Ka') > 0 or name.find('Kd') > 0:
                    color_space_fn_name = 'color_space_fn'
                buf = """    if (access("{1}", F_OK ) == -1 ) {{
        printf("file '{1}' does not exist.");
        return 1;
    }}
    printf("Loading resource '{1}'... ");
    fflush(stdout);
    Canvas {2};
    read_png(&{2}, "{1}", false, {3});
    uv_texture_pattern({2}, {0});
    printf("Done!\\n");
    fflush(stdout);
""".format(name, png_file_path, uv_pattern_name, color_space_fn_name)
                resources[file_path] = uv_pattern_name
            else:
                buf = """    uv_texture_pattern({1}, {0});
""".format(name, resources[file_path])
        else:
            raise ValueError('Unable to parse uv pattern type: {}'.format(typ))

        return buf
Exemplo n.º 60
0
def cleanup_hidden_files(is_windows=is_windows_os()):
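    # Note (added annotation): on Windows the command sequence from
    # get_windows_commands_to_delete() is passed through unchanged, while on
    # Linux the commands are joined into a single shell string; both branches
    # run with shell=True.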
    subprocess.run(  # noqa: DUO116
        get_windows_commands_to_delete()
        if is_windows else " ".join(get_linux_commands_to_delete()),
        shell=True,
    )