Beispiel #1
0
def generate_gn_args(mode):
    """Build the list of GN argument strings for the given build mode.

    Args:
        mode: either "release" or "debug"; any other value aborts the process
            with exit status 1.

    Returns:
        A list of "key=value" GN argument strings.
    """
    out = []
    if mode == "release":
        out += ["is_official_build=true"]
    elif mode == "debug":
        out += ["is_debug=true"]
    else:
        # BUG FIX: the original used a Python 2 print statement and applied
        # '%' to a '{}'-style template (no %-specifier -> TypeError).
        print("Bad mode {}. Use 'release' or 'debug' (default)".format(mode))
        sys.exit(1)

    # Allow extra GN arguments to be injected from the environment.
    if "DENO_BUILD_ARGS" in os.environ:
        out += os.environ["DENO_BUILD_ARGS"].split()

    # Prefer the prebuilt sccache; fall back to one on PATH.
    cacher = prebuilt.load_sccache()
    if not os.path.exists(cacher):
        cacher = find_executable("sccache") or find_executable("ccache")

    # Check if ccache or sccache are in the path, and if so we set cc_wrapper.
    cc_wrapper = cacher
    if cc_wrapper:
        # The gn toolchain does not shell escape cc_wrapper, so do it here.
        out += ['cc_wrapper=%s' % gn_string(shell_quote(cc_wrapper))]
        if os.name == "nt":
            # Disable treat_warnings_as_errors until this sccache bug is fixed:
            # https://github.com/mozilla/sccache/issues/264
            out += ["treat_warnings_as_errors=false"]

    # Look for sccache; if found, set rustc_wrapper.
    rustc_wrapper = cacher
    if rustc_wrapper:
        out += ['rustc_wrapper=%s' % gn_string(rustc_wrapper)]

    return out
Beispiel #2
0
    def test_make_distribution(self):
        """Build an sdist in both format orders and verify the artifacts."""
        # Skip silently when the required archivers are unavailable.
        if find_executable('tar') is None or find_executable('gzip') is None:
            return

        dist, cmd = self.get_cmd()
        dist_folder = join(self.tmp_dir, 'dist')
        expected = ['fake-1.0.tar', 'fake-1.0.tar.gz']

        # First pass: gztar followed by tar.
        cmd.formats = ['gztar', 'tar']
        cmd.ensure_finalized()
        cmd.run()
        self.assertEqual(sorted(os.listdir(dist_folder)), expected)

        # Clean out the artifacts before the second pass.
        for name in expected:
            os.remove(join(dist_folder, name))

        # Second pass: tar followed by gztar; result must be identical.
        cmd.formats = ['tar', 'gztar']
        cmd.ensure_finalized()
        cmd.run()
        self.assertEqual(sorted(os.listdir(dist_folder)), expected)
Beispiel #3
0
def get():
    """Return the CHPL_LAUNCHER setting, inferring a default when unset.

    Precedence: an explicit CHPL_LAUNCHER override, then platform-specific
    detection (Cray / marenostrum), then the communication layer, finally
    falling back to 'none'.
    """
    launcher_val = overrides.get('CHPL_LAUNCHER')
    if not launcher_val:
        comm_val = chpl_comm.get()
        platform_val = chpl_platform.get('target')
        # NOTE(review): compiler_val is queried but never used below — confirm
        # whether it can be dropped.
        compiler_val = chpl_compiler.get('target')

        # Cray X* systems and cross-compiles: probe PATH for a workload manager.
        if platform_val.startswith('cray-x') or chpl_platform.is_cross_compiling():
            has_aprun = find_executable('aprun')
            has_slurm = find_executable('srun')
            if has_aprun and has_slurm:
                # Both found: ambiguous, so deliberately pick no launcher.
                launcher_val = 'none'
            elif has_aprun:
                launcher_val = 'aprun'
            elif has_slurm or platform_val == 'aarch64':
                launcher_val = 'slurm-srun'
            else:
                # FIXME: Need to detect aprun/srun differently. On a cray
                #        system with an eslogin node, it is possible that aprun
                #        will not be available on the eslogin node (only on the
                #        login node).
                #
                #        has_aprun and has_slurm should look other places
                #        (maybe the modules?) to decide.
                #        (thomasvandoren, 2014-08-12)
                sys.stderr.write(
                    'Warning: Cannot detect launcher on this system. Please '
                    'set CHPL_LAUNCHER in the environment.\n')
        elif platform_val == 'marenostrum':
            launcher_val = 'marenostrum'
        elif comm_val == 'gasnet':
            # GASNet: the launcher follows the conduit (substrate).
            substrate_val = chpl_comm_substrate.get()
            if substrate_val == 'udp':
                launcher_val = 'amudprun'
            elif substrate_val == 'mpi':
                launcher_val = 'gasnetrun_mpi'
            elif substrate_val == 'ibv':
                if platform_val == 'pwr6':
                    # our loadleveler launcher is not yet portable/stable/
                    # flexible enough to serve as a good default
                    #launcher_val = 'loadleveler'
                    launcher_val = 'none'
                else:
                    launcher_val = 'gasnetrun_ibv'
            elif substrate_val == 'mxm':
                launcher_val = 'gasnetrun_ibv'
            elif substrate_val == 'lapi':
                # our loadleveler launcher is not yet portable/stable/flexible
                # enough to serve as a good default
                #launcher_val = 'loadleveler'
                launcher_val = 'none'
        elif comm_val == 'mpi':
            launcher_val = 'mpirun'
        else:
            launcher_val = 'none'

    # The warning path above leaves launcher_val unset; normalize to 'none'.
    if launcher_val is None:
        launcher_val = 'none'

    return launcher_val
Beispiel #4
0
    def find_binary_path(self,path=None, channel="nightly"):
        """Looks for the firefox binary in the virtual environment"""
        platform = {
            "Linux": "linux",
            "Windows": "win",
            "Darwin": "mac",
        }.get(uname[0])

        application_name = {
            "stable": "Firefox.app",
            "beta": "Firefox.app",
            "nightly": "Firefox Nightly.app",
        }.get(channel)

        if path is None:
            # os.getcwd() doesn't include the venv path
            path = os.path.join(os.getcwd(), "_venv", "browsers", channel)

        # Per-platform lookup strategies; unknown platforms yield None.
        if platform == "win":
            import mozinstall
            return mozinstall.get_binary(path, "firefox")
        if platform == "linux":
            return find_executable("firefox", os.path.join(path, "firefox"))
        if platform == "mac":
            macos_dir = os.path.join(path, application_name,
                                     "Contents", "MacOS")
            return find_executable("firefox", macos_dir)
        return None
Beispiel #5
0
def setup_sudoers(user):
    """Render and install the frappe sudoers policy for the given user."""
    sudoers_dir = '/etc/sudoers.d'
    if not os.path.exists(sudoers_dir):
        os.makedirs(sudoers_dir)

        # Only tighten permissions if we are creating /etc/sudoers ourselves.
        set_permissions = not os.path.exists('/etc/sudoers')

        with open('/etc/sudoers', 'a') as f:
            f.write('\n#includedir /etc/sudoers.d\n')

        if set_permissions:
            os.chmod('/etc/sudoers', 0o440)

    sudoers_file = '/etc/sudoers.d/frappe'

    # Render the policy with the absolute paths of the managed commands.
    rendered = env.get_template('frappe_sudoers').render(
        user=user,
        service=find_executable('service'),
        systemctl=find_executable('systemctl'),
        supervisorctl=find_executable('supervisorctl'),
        nginx=find_executable('nginx'),
        bench=find_executable('bench'),
    )
    rendered = safe_decode(rendered)

    with open(sudoers_file, 'w') as f:
        f.write(rendered)

    # Sudoers files must not be world-readable or group-writable.
    os.chmod(sudoers_file, 0o440)
Beispiel #6
0
 def test_40_can_resize(self):
     '''Test for installed filesystem resize utility with CanResize'''
     # Bit flags matching the CanResize mode documentation.
     offline_shrink = 0b00010
     offline_grow   = 0b00100
     online_shrink  = 0b01000
     online_grow    = 0b10000
     manager = self.get_interface(self.manager_obj, '.Manager')
     # Unsupported filesystem types are reported via a DBus error.
     with self.assertRaises(dbus.exceptions.DBusException):
         manager.CanResize('nilfs2')
     avail, mode, util = manager.CanResize('xfs')
     # the resize mode flag values are defined in the method documentation
     self.assertEqual(mode, online_grow | offline_grow)
     # 'util' is empty when the tool is available, otherwise it names the
     # executable that would be needed.
     if avail:
         self.assertEqual(util, '')
     else:
         self.assertEqual(util, 'xfs_growfs')
     # Availability must agree with whether the tool is actually on PATH.
     self.assertEqual(avail, find_executable('xfs_growfs') is not None)
     avail, mode, util = manager.CanResize('ext4')
     self.assertEqual(mode, offline_shrink | offline_grow | online_grow)
     if avail:
         self.assertEqual(util, '')
     else:
         self.assertEqual(util, 'resize2fs')
     self.assertEqual(avail, find_executable('resize2fs') is not None)
     avail, mode, util = manager.CanResize('vfat')
     self.assertTrue(avail)  # libparted, no executable
     self.assertEqual(util, '')
     self.assertEqual(mode, offline_shrink | offline_grow)
Beispiel #7
0
def _findEditorTools(usdFileName, readOnly):
    """Locate the usdcat executable and a text-editor command line.

    Returns a (usdcatCmd, editorCmd) tuple; exits the process when either
    tool cannot be found.
    """
    from distutils.spawn import find_executable

    # Ensure the usdcat executable has been installed
    usdcatCmd = find_executable("usdcat")
    if not usdcatCmd:
        sys.exit("Error: Couldn't find 'usdcat'. Expected it to be in PATH.")

    # Ensure we have a suitable editor available: $EDITOR wins, then emacs,
    # then vim.
    editorCmd = (os.getenv("EDITOR")
                 or find_executable("emacs")
                 or find_executable("vim"))
    if not editorCmd:
        sys.exit("Error: Couldn't find a suitable text editor to use. Expected "
                 "either $EDITOR to be set, or emacs/vim to be installed.")

    # special handling for emacs users
    if 'emacs' in editorCmd:
        prefix = "--noeffect " if readOnly else ""
        title = '"usdedit %s%s"' % (prefix, usdFileName)
        editorCmd = editorCmd + " -name %s" % title

    return (usdcatCmd, editorCmd)
Beispiel #8
0
    def _test_deploy_remove(self, system):
        """Deploy a SimpleHTTPServer service via 'generate', exercise its
        lifecycle (stop/start/restart on POSIX only), then remove it."""
        # nssm is Windows-only, so pick the interpreter path accordingly.
        if system == "nssm":
            args = find_executable("python") or "c:\\python27\\python"
        else:
            args = find_executable("python2") or "/usr/bin/python2"
        init_system = {"--init-system=": system}
        # Deploy options: name, app args, deploy+start, verbose, overwrite.
        opts = {
            "-n": self.service_name,
            "-a": "-m SimpleHTTPServer",
            "-d": None,
            "-s": None,
            "-v": None,
            "--overwrite": None,
        }
        opts.update(init_system)

        _invoke_click("generate", [args], opts)
        self._verify_port_open()
        # stop/start/restart are not exercised through this path on Windows.
        if not utils.IS_WIN:
            _invoke_click("stop", [self.service_name], init_system)
            self._verify_port_closed()
            _invoke_click("start", [self.service_name], init_system)
            self._verify_port_open()
            _invoke_click("restart", [self.service_name], init_system)
            self._verify_port_open()
        _invoke_click("remove", [self.service_name], init_system)
        self._verify_port_closed()
Beispiel #9
0
 def test_40_can_format(self):
     '''Test for installed filesystem creation utility with CanFormat'''
     manager = self.get_interface(self.manager_obj, '.Manager')
     # Unknown filesystem types are reported via a DBus error.
     with self.assertRaises(dbus.exceptions.DBusException):
         manager.CanFormat('wxyz')
     # For each type: 'util' is empty when the mkfs tool is available,
     # otherwise it names the executable that would be required.
     avail, util = manager.CanFormat('xfs')
     if avail:
         self.assertEqual(util, '')
     else:
         self.assertEqual(util, 'mkfs.xfs')
     # Availability must agree with whether the tool is actually on PATH.
     self.assertEqual(avail, find_executable('mkfs.xfs') is not None)
     avail, util = manager.CanFormat('f2fs')
     if avail:
         self.assertEqual(util, '')
     else:
         self.assertEqual(util, 'mkfs.f2fs')
     self.assertEqual(avail, find_executable('mkfs.f2fs') is not None)
     avail, util = manager.CanFormat('ext4')
     if avail:
         self.assertEqual(util, '')
     else:
         self.assertEqual(util, 'mkfs.ext4')
     self.assertEqual(avail, find_executable('mkfs.ext4') is not None)
     for fs in map(str, self.get_property(self.manager_obj, '.Manager', 'SupportedFilesystems').value):
         avail, util = manager.CanFormat(fs)
         # currently UDisks relies on executables for filesystem creation
         if avail:
             self.assertEqual(util, '')
         else:
             self.assertGreater(len(util), 0)
Beispiel #10
0
def execute(bench_path):
	"""Abort "bench update" unless a sufficiently new node executable exists."""
	minimum_node_ver = Version('5.0.0')
	node_exec = find_executable('node') or find_executable('nodejs')

	# No node at all: tell the user how to install it and bail out.
	if not node_exec:
		click.echo('''
		No node executable was found on your machine.
		Please install latest node version before running "bench update". For installation instructions
		please refer "Debian and Ubuntu based Linux distributions" section or "Enterprise Linux and
		Fedora" section depending upon your OS on the following link,
		"https://nodejs.org/en/download/package-manager/"
		''')
		sys.exit(1)

	# `node -v` prints e.g. "v5.0.0\n"; strip both decorations before parsing.
	version_output = subprocess.check_output([node_exec, '-v']).decode()
	installed_ver = Version(version_output.rstrip('\n').lstrip('v'))

	if installed_ver < minimum_node_ver:
		click.echo('''
		Please update node to latest version before running "bench update".
		Please install latest node version before running "bench update". For installation instructions
		please refer "Debian and Ubuntu based Linux distributions" section or "Enterprise Linux and
		Fedora" section depending upon your OS on the following link,
		"https://nodejs.org/en/download/package-manager/"
		''')
		sys.exit(1)
Beispiel #11
0
  def android_update_sdk_path(self):
    """Checks PATH variable and edits bashrc/profile/path for Android SDK."""
    # Assume both tools/ and platform-tools/ need adding until proven present.
    tools_update_path = True
    platform_update_path = True
    if find_executable("android"):
      tools_update_path = False
    if find_executable("ndk-build"):
      platform_update_path = False

    if tools_update_path or platform_update_path:
      if self.bash:  # LINUX or MAC
        # Append export lines to the user's shell startup file.
        with open(self.bash, "a") as f:
          if tools_update_path:
            f.write("export PATH=" + os.path.join(self.sdk_path, "tools")
                    + ":$PATH\n")
          if platform_update_path:
            # Implicit literal concatenation: "platform" "-tools" is
            # "platform-tools".
            f.write("export PATH=" + os.path.join(self.sdk_path, "platform"
                                                  "-tools") + ":$PATH\n")
      else:  # WINDOWS
        # Accumulate the PATH additions; presumably applied elsewhere.
        if tools_update_path:
          self.windows_path_update = os.path.join(
              self.sdk_path, "tools") + os.pathsep + self.windows_path_update
        if platform_update_path:
          self.windows_path_update = (os.path.join(
              self.sdk_path, "platform-tools") + os.pathsep +
                                      self.windows_path_update)
      # NOTE(review): set on the Windows branch too, despite the name — confirm.
      self.bash_changed = True
Beispiel #12
0
    def init(self):
        """Locate the OpenShift CLI and its config file.

        Raises ProviderFailedException when the CLI is not executable or the
        configuration file is unreadable.
        """
        self.cli = find_executable(self.cli_str)
        if self.container and not self.cli:
            # Inside a container the host filesystem is mounted under /host:
            # retry the lookup with every PATH entry prefixed accordingly.
            host_path = []
            for path in os.environ.get("PATH").split(":"):
                host_path.append("/host%s" % path)
            self.cli = find_executable(self.cli_str, path=":".join(host_path))
            if not self.cli:
                # if run as non-root we need a symlink in the container
                os.symlink("/host/usr/bin/openshift", "/usr/bin/oc")
                self.cli = "/usr/bin/oc"

        # Whatever way it was found, the CLI must exist and be executable.
        if not self.cli or not os.access(self.cli, os.X_OK):
            raise ProviderFailedException("Command %s not found" % self.cli)
        else:
            logger.debug("Using %s to run OpenShift commands.", self.cli)

        if "openshiftconfig" in self.config:
            self.config_file = self.config["openshiftconfig"]
            if self.container:
                # Rebase the config path onto the mounted host filesystem.
                self.config_file = os.path.join("/host", self.config_file.lstrip("/"))
        else:
            logger.warning("Configuration option 'openshiftconfig' not found")

        if not self.config_file or not os.access(self.config_file, os.R_OK):
            raise ProviderFailedException(
                "Cannot access configuration file %s. Try adding "
                "'openshiftconfig = /path/to/your/.kube/config' in the "
                "[general] section of the answers.conf file." % self.config_file)
Beispiel #13
0
def test_x11_remote():
    """Drive an xterm through an X11 remote and verify the keypresses landed."""
    from nose.plugins.skip import SkipTest
    from distutils.spawn import find_executable
    import sys
    if not find_executable('Xorg') or not find_executable('xterm'):
        raise SkipTest("Testing X11Remote requires X11 and xterm")

    with utils.named_temporary_directory() as tmp, \
            temporary_x_session() as display:
        r = uri_to_remote('x11:%s' % display)

        # Log the xterm session so failures can be diagnosed below.
        subprocess.Popen(
            ['xterm', '-l', '-lf', 'xterm.log'],
            env={'DISPLAY': display, 'PATH': os.environ['PATH']},
            cwd=tmp, stderr=open('/dev/null', 'w'))

        # Can't be sure how long xterm will take to get ready:
        for _ in range(0, 20):
            for keysym in ['t', 'o', 'u', 'c', 'h', 'space', 'g', 'o', 'o',
                           'd', 'Return']:
                r.press(keysym)
            if os.path.exists(tmp + '/good'):
                break
            time.sleep(0.5)
        with open(tmp + '/xterm.log', 'r') as log:
            for line in log:
                # BUG FIX: was a Python 2 print statement (a syntax error under
                # Python 3); write directly so the module parses everywhere.
                sys.stdout.write("xterm.log: " + line)
        assert os.path.exists(tmp + '/good')
Beispiel #14
0
def _gcc_search(library):
    """
    Search a library path by calling and parsing a C linker call.

    Run GCC's linker with the -t (aka --trace) option and examine the library
    name it prints out. The GCC command will fail because we haven't supplied a
    proper program with main(), but that does not matter.
    """
    linker = find_executable('cc') or find_executable('gcc')
    if linker is None:
        return None

    # BUG FIX: initialize up front — if _execute() raises an ENOENT OSError
    # below, 'trace' would otherwise be unbound and the 'if trace is None'
    # check after the except block would raise NameError.
    trace = None
    try:
        with NamedTemporaryFile() as tmp:
            command = '{linker} -Wl,-t -o "{program}" -l"{library}"'.format(
                linker=linker, program=tmp.name, library=library
            )
            trace = _execute(command)
    except OSError as e:
        # ENOENT is raised if the file was already removed, which is the normal
        # behaviour of GCC if linking fails
        if e.errno != ENOENT:
            raise

    if trace is None:
        return None

    # Pull the first "...libNAME...." token (no parens/whitespace) out of the
    # linker's trace output.
    expr = r'[^\(\)\s]*lib{}\.[^\(\)\s]*'.format(escape(library))
    match = search(expr, trace)
    if not match:
        return None

    library_path = normpath(match.group(0))
    return _final_path(library_path)
Beispiel #15
0
def test_x11_remote():
    """Drive an xterm through an X11 remote and verify the keypresses landed."""
    from unittest import SkipTest
    from .x11 import x_server
    import sys

    if not find_executable("Xorg") or not find_executable("xterm"):
        raise SkipTest("Testing X11Remote requires X11 and xterm")

    with utils.named_temporary_directory() as tmp, x_server(320, 240) as display:
        r = uri_to_remote("x11:%s" % display)

        # Log the xterm session so failures can be diagnosed below.
        subprocess.Popen(
            ["xterm", "-l", "-lf", "xterm.log"],
            env={"DISPLAY": display, "PATH": os.environ["PATH"]},
            cwd=tmp,
            stderr=open("/dev/null", "w"),
        )

        # Can't be sure how long xterm will take to get ready:
        for _ in range(0, 20):
            for keysym in ["KEY_T", "KEY_O", "KEY_U", "KEY_C", "KEY_H", "KEY_SPACE", "g", "o", "o", "d", "KEY_OK"]:
                r.press(keysym)
            if os.path.exists(tmp + "/good"):
                break
            time.sleep(0.5)
        with open(tmp + "/xterm.log", "r") as log:
            for line in log:
                # BUG FIX: was a Python 2 print statement (a syntax error under
                # Python 3); write directly so the module parses everywhere.
                sys.stdout.write("xterm.log: " + line)
        assert os.path.exists(tmp + "/good")
def check_for_blast():
    """Exit with an error unless makeblastdb and blastn are both on PATH."""
    makeblastdb_path = spawn.find_executable('makeblastdb')
    blastn_path = spawn.find_executable('blastn')
    # PEP 8: comparisons to None use 'is not', not '!='.
    blast_installed = (makeblastdb_path is not None and blastn_path is not None)
    if not blast_installed:
        print('Error: could not find BLAST program', file=sys.stderr)
        # BUG FIX: quit() raises SystemExit(None) -> exit status 0 even though
        # this is an error path; sys.exit(1) reports failure properly (and
        # quit() is only meant for interactive sessions).
        sys.exit(1)
Beispiel #17
0
def to_text(path):
    """Wraps Tesseract OCR.

    Parameters
    ----------
    path : str
        path of electronic invoice in JPG or PNG format

    Returns
    -------
    extracted_str : str
        returns extracted text from image in JPG or PNG format

    """
    import subprocess
    from distutils import spawn

    # Check for dependencies. Needs Tesseract and Imagemagick installed.
    if not spawn.find_executable('tesseract'):
        raise EnvironmentError('tesseract not installed.')
    if not spawn.find_executable('convert'):
        raise EnvironmentError('imagemagick not installed.')

    # convert = "convert -density 350 %s -depth 8 tiff:-" % (path)
    convert = ['convert', '-density', '350', path, '-depth', '8', 'png:-']
    # BUG FIX: shell=True combined with an argument *list* is broken on POSIX —
    # only 'convert' reaches the shell and every other argument is dropped.
    # Execute the argv list directly instead.
    p1 = subprocess.Popen(convert, stdout=subprocess.PIPE)

    tess = ['tesseract', 'stdin', 'stdout']
    p2 = subprocess.Popen(tess, stdin=p1.stdout, stdout=subprocess.PIPE)
    # Close our copy of the pipe so convert receives SIGPIPE if tesseract
    # exits early.
    p1.stdout.close()

    out, err = p2.communicate()

    extracted_str = out

    return extracted_str
def deployDataprocessor(keyfile, hostname, dbServer):
	"""Copy dataProcessor and nltk_data to the remote host via scp, install
	the CouchDB views, then run the ansible playbook.

	Exits with status 1 on any transfer or playbook failure.
	"""
	updateConfigFile(dbServer)
	
	if sshFileCheck(keyfile):
		if find_executable('scp') is not None and os.path.isdir('dataProcessor'):

			#Copy dataProcessor to home for couchDB setup
			status_code = subprocess.call(['scp', '-i', keyfile, '-r', './dataProcessor/', 'ubuntu@' + hostname +':~/dataProcessor'])
			if status_code != 0:
				sys.exit(1)

			# Ship the NLTK corpora alongside the processor code.
			status_code = subprocess.call(['scp', '-i', keyfile, '-r', './nltk_data', 'ubuntu@' + hostname +':~/'])
			if status_code != 0:
				sys.exit(1)
		else:
			print('Please install scp or dataProcessor or nltk_data is in place and run the program again')
			sys.exit(1)
	else:
		print('public key is missing')
	
	# Install the CouchDB views before the playbook provisions the service.
	deployView(hostname)
	
	if find_executable('ansible-playbook') is not None  and os.path.isfile('dataProcessor.yml'):
		status_code = subprocess.call(['ansible-playbook', 'dataProcessor.yml'])
		if status_code != 0:
			sys.exit(1)
		print('Ansible deploying dataProcessor done')
	else:
		print('ansible-playbook is not installed or dataProcessor.yml might be missing')
        def setUp(self):
            """Prepare the jsTestDriver environment for GUI JavaScript tests."""
            super(BrowserJsUnitTestCase, self).setUp()

            # If java not available, skip the test since it is needed
            #    by jsTestDriver
            if not find_executable( "java" ):
                raise SkipTest( "java is needed to do the "
                                "GUI JavaScript unit testing" )

            # run headless if xvfb is available
            if find_executable( "xvfb-run" ):
                java_cmd = "xvfb-run java"
            else:
                # BUG FIX: this line previously mixed a tab into its
                # indentation, which is a TabError under Python 3.
                java_cmd = "java"

            # What is the path to the exe of this browser, if any
            browser_name, browser_exe_filepath = self.get_browser_info()
            if not browser_exe_filepath:
                raise SkipTest("The browser '%s' is not available for use in "
                               "GUI JavaScript unit testing"  % browser_name)

            # Set some env vars used by jsTestDriver
            os.environ[ 'JSTESTDRIVER' ] = """%(java_cmd)s -jar %(jstd_path)s
                                               --port %(port_num)d
                                               --captureConsole
                                               --browser %(browser)s""" % \
            { 'java_cmd':  java_cmd,
              'jstd_path': jstestdriver_path,
              'port_num' : port_number,
              'browser':   browser_exe_filepath,
              }
            os.environ[ 'JSTESTDRIVER_PORT' ] = str( port_number )
            os.environ[ 'JSTESTDRIVER_SERVER' ] = \
                        'http://localhost:%d' % ( port_number )
Beispiel #20
0
    def setUp(self):
        """Patch nidaba config/storage onto a temp dir and import the ocropus plugin."""
        # Skip entirely when the ocropus command line tools are missing.
        if None in [spawn.find_executable('ocropus-rpred'),
                    spawn.find_executable('ocropus-gpageseg'),
                    spawn.find_executable('ocropus-hocr')]:
            raise SkipTest
        self.config_mock = MagicMock()
        storage_path = unicode(tempfile.mkdtemp())
        self.config_mock.nidaba_cfg = {
            'storage_path': storage_path,
            'lang_dicts': {},
            'ocropus_models': {'ocropus': ('test', 'en-default.pyrnn.gz')},
            'plugins_load': {}
        }

        self.patches = {
            'nidaba.config': self.config_mock,
        }
        self.patcher = patch.dict('sys.modules', self.patches)
        self.patcher2 = patch('nidaba.storage.nidaba_cfg', self.config_mock.nidaba_cfg)
        self.addCleanup(self.patcher2.stop)
        self.addCleanup(self.patcher.stop)
        self.patcher.start()
        self.patcher2.start()
        # BUG FIX: this line was indented with a tab while the rest of the
        # method uses spaces — a TabError under Python 3.
        self.storage_path = storage_path
        shutil.copytree(resources, self.storage_path + '/test')
        from nidaba.plugins import ocropus
        ocropus.setup()
        self.ocropus = ocropus
Beispiel #21
0
   def build_man_page(self):
      """Build man pages for all installable programs."""
        
      self.announce("building man pages")

      descriptions = {}
      descriptions['synopsis'] = """simple frontend to the Synopsis framework, a multi-language source code introspection tool that
provides a variety of representations for the parsed code, to
enable further processing such as documentation extraction,
reverse engineering, and source-to-source translation."""
      
      descriptions['sxr-server'] = """the Synopsis Cross-Reference http server. Allows users
to query and browse cross-referenced source code."""
      

      help2man = find_executable('help2man')
      if not help2man:
         self.warn("cannot build man pages")
         return
      gzip = find_executable('gzip')

      section = 1
      man_dir = 'share/man/man%d'%section
      mkpath(man_dir, 0777, self.verbose, self.dry_run)

      for s in ['synopsis', 'sxr-server']:
         command = [help2man, '-N', '-n', descriptions[s]]
         executable = os.path.join('scripts', s)
         output = '%s/%s.%d'%(man_dir, s, section)
         command += ['-o', output, executable]
         spawn(command)
         if gzip:
            spawn(['gzip', '-f', output])
Beispiel #22
0
 def run(self):
     """Ensure a cmake executable exists, building one locally when absent."""
     final_build_dir = os.path.join(self.build_base, self.cmake_dir)
     bin_dir = os.path.join(final_build_dir, 'bin')
     self.cmake_exe = find_executable('cmake', bin_dir)
     if self.cmake_exe is None:
         # Not found: build cmake from source, then look again.
         self.__build_cmake(self.cmake_version, final_build_dir, self.build_base)
         self.cmake_exe = find_executable('cmake', bin_dir)
         assert self.cmake_exe
Beispiel #23
0
def get_ifconfig():
    """Ensure ifconfig exists, offering to install net-tools via pacman.

    Prompts interactively; exits the process when ifconfig cannot be provided.
    """
    # This is only useful for Arch Linux which does not contain ifconfig by default
    if not find_executable('ifconfig'):
        install = raw_input(
            ('[' + T + '*' + W + '] ifconfig not found. ' +
             'install now? [y/n] ')
        )
        if install == 'y':
            if os.path.isfile('/usr/bin/pacman'):
                os.system('pacman -S net-tools')
            else:
                # No pacman: we only know how to install on Arch.
                sys.exit((
                    '\n[' + R + '-' + W + '] Don\'t know how to install ifconfig for your distribution.\n' +
                    '[' + G + '+' + W + '] Rerun the script after installing it manually.\n' +
                    '[' + R + '!' + W + '] Closing'
                ))
        else:
            sys.exit(('[' + R + '-' + W + '] ifconfig' +
                     ' not found'))
    # Re-check: the pacman install above may have failed (e.g. no network).
    if not find_executable('ifconfig'):
        sys.exit((
            '\n[' + R + '-' + W + '] Unable to install the \'net-tools\' package!\n' +
            '[' + T + '*' + W + '] This process requires a persistent internet connection!\n' +
            '[' + G + '+' + W + '] Run pacman -Syu to make sure you are up to date first.\n' +
            '[' + G + '+' + W + '] Rerun the script to install net-tools.\n' +
            '[' + R + '!' + W + '] Closing'
         ))
Beispiel #24
0
    def process(self, progress):
            """Run ICU's runConfigureICU under Cygwin bash; return False on failure."""
            from distutils.spawn import find_executable
            # Translate the ICU dist path into Cygwin form via cygpath.
            res = find_executable("cygpath", os.path.join(config['paths']['build'], "cygwin", "bin"))
            if res is not None:
                # NOTE(review): check_output gets a single space-joined string
                # without shell=True — on POSIX that would be treated as one
                # executable name; presumably relies on Windows semantics. Confirm.
                current_dir_cygwin = subprocess.check_output("{0} {1}"
                                .format(res,
                                os.path.join(config["paths"]["build"], "icu", "dist")))
            # NOTE(review): if cygpath was NOT found, current_dir_cygwin is
            # unbound when used below — looks like cygwin is assumed present.

            soutpath = os.path.join(self._context["build_path"], "stdout.log")
            serrpath = os.path.join(self._context["build_path"], "stderr.log")
            with open(soutpath, "w") as sout:
                with open(serrpath, "w") as serr:
                    # Invoke the configure script through Cygwin's bash.
                    res = find_executable("bash", os.path.join(config['paths']['build'], "cygwin", "bin"))
                    proc = subprocess.Popen([res, "runConfigureICU", "Cygwin/MSVC", "--prefix"
                                            , "{}".format(current_dir_cygwin)],
                             env=icu_environment(),
                             cwd=os.path.join(self._context["build_path"], "source"),
                             shell=True,
                             stdout=sout, stderr=serr)
                # NOTE(review): communicate() runs after the serr context has
                # closed, so stderr written past this point targets a closed
                # file — confirm this is intended.
                proc.communicate()
                if proc.returncode != 0:
                    logging.error("failed to run icu runConfigureICU (returncode %s), see %s and %s",
                                  proc.returncode, soutpath, serrpath)
                    return False

            return True
def check_requirements():
  """Verify every required external program is installed.

  Prints an installation hint (package-manager on Linux, a download URL on
  Windows) and exits with status 1 at the first missing program.
  """
  # (executable, package name, Windows install hint) — consolidates the three
  # copy-pasted loops of the original into one data-driven pass.
  required = (
    ('convert', 'imagemagick',
     "Please install the imagemagick suite from http://imagemagick.org/script/binary-releases.php#windows and try again"),
    ('identify', 'imagemagick',
     "Please install the imagemagick suite from http://imagemagick.org/script/binary-releases.php#windows and try again"),
    ('mogrify', 'imagemagick',
     "Please install the imagemagick suite from http://imagemagick.org/script/binary-releases.php#windows and try again"),
    ('exiftool', 'exiftool',
     "Please install exiftool from http://www.sno.phy.queensu.ca/~phil/exiftool/ and try again"),
    ('awk', 'awk',
     "Please install awk from http://gnuwin32.sourceforge.net/packages/gawk.htm and try again"),
  )
  for program, package, windows_hint in required:
    if not spawn.find_executable(program):
      print("Missing required program '%s'." %package)
      if opsys == "Linux":
        print("Please install from the package manager and try again")
      else:
        print(windows_hint)
      sys.exit(1)
Beispiel #26
0
def setup_sudoers(user):
    """Render and install /etc/sudoers.d/frappe for the given user."""
    if not os.path.exists("/etc/sudoers.d"):
        os.makedirs("/etc/sudoers.d")

        # Only tighten permissions when we create /etc/sudoers ourselves.
        set_permissions = False
        if not os.path.exists("/etc/sudoers"):
            set_permissions = True

        with open("/etc/sudoers", "a") as f:
            f.write("\n#includedir /etc/sudoers.d\n")

        if set_permissions:
            # BUG FIX: 0440 is a Python 2-only octal literal (syntax error on
            # Python 3); 0o440 is equivalent and valid on Python 2.6+ as well.
            os.chmod("/etc/sudoers", 0o440)

    sudoers_file = "/etc/sudoers.d/frappe"

    # Render the policy with absolute paths of the managed commands.
    template = env.get_template("frappe_sudoers")
    frappe_sudoers = template.render(
        **{
            "user": user,
            "service": find_executable("service"),
            "systemctl": find_executable("systemctl"),
            "supervisorctl": find_executable("supervisorctl"),
            "nginx": find_executable("nginx"),
            "bench": find_executable("bench"),
        }
    )

    with open(sudoers_file, "w") as f:
        f.write(frappe_sudoers.encode("utf-8"))

    # Sudoers files must not be world-readable or group-writable.
    os.chmod(sudoers_file, 0o440)
Beispiel #27
0
def mysql_system_database(run_services, mysql_data_dir, mysql_defaults_file, memory_temp_dir, lock_dir, services_log):
    """Install the MySQL system database into ``mysql_data_dir``.

    No-op when ``run_services`` is falsy. Otherwise locates the
    ``mysql_install_db`` and ``my_print_defaults`` binaries (asserting
    both exist), derives the MySQL base directory from the latter, and
    runs ``mysql_install_db`` against ``mysql_defaults_file`` for the
    current ``$USER``. Failures are logged (with a hint about AppArmor
    restrictions on /run/shm) and re-raised.

    ``memory_temp_dir`` and ``lock_dir`` are accepted but unused here —
    presumably upstream fixture dependencies; confirm against callers.
    """
    if run_services:
        mysql_install_db = find_executable('mysql_install_db')
        assert mysql_install_db, 'You have to install mysql_install_db script.'

        my_print_defaults = find_executable('my_print_defaults')
        assert my_print_defaults, 'You have to install my_print_defaults script.'

        # my_print_defaults lives in <basedir>/bin, so go up two levels.
        mysql_basedir = os.path.dirname(os.path.dirname(my_print_defaults))

        try:
            services_log.debug('Starting mysql_install_db.')
            check_output([
                mysql_install_db,
                '--defaults-file={0}'.format(mysql_defaults_file),
                '--datadir={0}'.format(mysql_data_dir),
                '--basedir={0}'.format(mysql_basedir),
                '--user={0}'.format(os.environ['USER'])
            ])
        except CalledProcessWithOutputError as e:
            # Fixed typo in the log message ("erorr" -> "error").
            services_log.error(
                '{e.cmd} failed with output:\n{e.output}\nand error:\n{e.err}. '
                'Please ensure you disabled apparmor for /run/shm/** or for whole mysql'.format(e=e))
            raise
        finally:
            services_log.debug('mysql_install_db was executed.')
Beispiel #28
0
 def run(self):
     """Generate the ANTLR parser sources, then run the normal build.

     Locates an ANTLR 3 tool — preferring a locally-hosted jar run via
     ``java``, falling back to a system ``antlr3`` executable — and
     compiles orderlyjson/OrderlyJSON.g into the target directory
     before delegating to ``build_ext.run``.
     """
     # NOTE(review): ``True or ...`` makes this branch unconditional and
     # defeats the dry_run check — looks like a debugging leftover; confirm.
     if True or not self.dry_run:
         antlr3 = None
         # First, try locally-hosted antlr3
         if os.path.exists('antlr-3.1.3.jar'):
             antlr3 = find_executable('java')
             if antlr3 is not None:
                 antlr3 = [antlr3, '-cp', 'antlr-3.1.3.jar', 'org.antlr.Tool']
         # Then, try to find system-provided one
         if antlr3 is None:
             antlr3 = find_executable('antlr3')
             if antlr3 is None:
                 raise RuntimeError("antlr3 (>= 3.1 but < 3.2) is required")
             antlr3 = [antlr3]
         # TODO: antlr3 jar and python runtime version check?
         source_file = os.path.join('orderlyjson', 'OrderlyJSON.g')
         if self.inplace:
             target_dir = 'orderlyjson'
         else:
             target_dir = os.path.join(self.build_lib, 'orderlyjson')
         tokens_file = os.path.join(target_dir, 'OrderlyJSON.tokens')
         self.mkpath(target_dir)
         self.spawn(antlr3 + ['-fo', target_dir, source_file])
         # The generated .tokens file is a build artifact we don't keep.
         os.unlink(tokens_file)
     build_ext.run(self)
Beispiel #29
0
    def run(self):
        """Obtain a ninja executable, bootstrapping it if necessary.

        Chooses bootstrap flags from ``self.compiler`` (msvc or
        mingw32), fetches the ninja sources from GitHub unless a
        previous per-compiler checkout exists, and builds ninja via its
        own ``bootstrap.py`` when no prebuilt binary is found in the
        build directory. Sets ``self.ninja_exe`` on success.
        """
        if self.compiler is None:
            self.compiler = distutils.ccompiler.get_default_compiler()
        bootstrap_args = []
        win64 = sys.maxsize > 2**32
        if self.compiler == 'msvc':
            bootstrap_args = ['--platform=msvc']
            # Pin distutils' MSVC version and prime the compiler
            # environment for the target architecture before bootstrapping.
            distutils.msvc9compiler.VERSION = 11.0
            compiler = distutils.ccompiler.new_compiler(compiler='msvc')
            compiler.initialize('win-amd64' if win64 else 'win32')
        elif self.compiler == 'mingw32':
            bootstrap_args = ['--platform=mingw']

        ninja_info = dict(user=self.ninja_github_user,
            repo=self.ninja_github_repo, branch=self.ninja_github_branch)
        # Keep per-compiler build dirs separate so switching compilers
        # doesn't reuse stale artifacts.
        self.ninja_build_dir += '_' + self.compiler
        final_dir = os.path.join(self.build_base, self.ninja_build_dir)
        if os.path.isdir(final_dir):
            print("Found '{}', assuming it contains {}:{}".format(
                final_dir, ninja_info['repo'], ninja_info['branch']))
        else:
            get_from_github(ninja_info, final_dir, self.build_base)
        self.ninja_exe = find_executable('ninja', final_dir)
        if self.ninja_exe is None:
            # No prebuilt binary found -- build ninja with its bootstrap.
            subprocess.check_call([sys.executable, 'bootstrap.py'] +
                bootstrap_args, cwd=final_dir)
            self.ninja_exe = find_executable('ninja', final_dir)
            assert self.ninja_exe
def get_solver_home(solvername):
    """Return the installation home directory of the given solver.

    Resolution order for each supported solver:
      1. the solver-specific environment variable
         (``CPLEXDIR`` for CPLEX, ``GUROBI_HOME`` for Gurobi);
      2. locating the solver binary on the PATH and walking up from its
         ``bin`` directory to the install root.

    Returns an empty string when the solver is unknown or could not be
    located.
    """
    import os
    import sys
    from distutils.spawn import find_executable

    if solvername == CPLEX:
        # Try for environmental variable first
        env_path = os.getenv('CPLEXDIR')
        if env_path and len(env_path.strip()) > 0:
            return env_path

        # Try to find the cplex binary in the PATH
        ex_path = find_executable('cplex')
        if ex_path:
            ex_path = os.path.realpath(ex_path)  # Expand symbolic links if any
            ex_dir = os.path.dirname(ex_path)  # Path to the bin directory
            # The cplex binary sits two levels below the install root.
            return os.path.abspath(os.path.join(ex_dir, os.pardir, os.pardir))

    elif solvername == GUROBI:
        # Try for environmental variable first
        env_path = os.getenv('GUROBI_HOME')
        if env_path and len(env_path.strip()) > 0:
            return env_path

        # Try to find the gurobi_cl binary in the PATH
        ex_path = find_executable('gurobi_cl')
        if ex_path:
            ex_path = os.path.realpath(ex_path)  # Expand symbolic links if any
            ex_dir = os.path.dirname(ex_path)  # Path to the bin directory
            return os.path.abspath(os.path.join(ex_dir, os.pardir))
    else:
        # ``print >> sys.stderr`` was Python-2-only syntax (a SyntaxError
        # on Python 3); this form behaves identically on both versions.
        sys.stderr.write("Error unknown solver name %s\n" % (solvername,))

    return ""
Beispiel #31
0
    def handle(self, *args, **options):
        """First-time setup for a KA Lite server (Django management command).

        Walks through: Python version checks, optional git migration,
        database detection/creation (upgrade vs. clean install for
        SQLite), content pack retrieval, key/device/facility
        initialization, admin user creation, static file collection,
        and — on distributed servers — content annotation plus an
        optional server start.

        ``options`` keys read here: interactive, hostname, username,
        password, email, description, git_migrate_path, verbosity,
        no-assessment-items, force-assessment-item-dl.
        """
        if not options["interactive"]:
            options["hostname"] = options["hostname"] or get_host_name()

        # blank allows ansible scripts to dump errors cleanly.
        print("                                     ")
        print("   _   __  ___    _     _ _          ")
        print("  | | / / / _ \  | |   (_) |         ")
        print("  | |/ / / /_\ \ | |    _| |_ ___    ")
        print("  |    \ |  _  | | |   | | __/ _ \   ")
        print("  | |\  \| | | | | |___| | ||  __/   ")
        print("  \_| \_/\_| |_/ \_____/_|\__\___|   ")
        print("                                     ")
        print("https://learningequality.org/ka-lite/")
        print("                                     ")
        print("         version %s" % VERSION)
        print("                                     ")

        # Hard requirement: only the Python 2.7 series is supported.
        if sys.version_info < (2, 7):
            raise CommandError("Support for Python version 2.6 and below had been discontinued, please upgrade.")
        elif sys.version_info >= (2, 8):
            raise CommandError(
                "Your Python version is: %d.%d.%d -- which is not supported. Please use the Python 2.7 series or wait for Learning Equality to release Kolibri.\n" % sys.version_info[:3])
        elif sys.version_info < (2, 7, 6):
            logging.warning(
                "It's recommended that you install Python version 2.7.6. Your version is: %d.%d.%d\n" % sys.version_info[:3])

        if options["interactive"]:
            print(
                "--------------------------------------------------------------------------------")
            print(
                "This script will configure the database and prepare it for use.")
            print(
                "--------------------------------------------------------------------------------")
            raw_input("Press [enter] to continue...")

        # Assuming uid '0' is always root
        if not is_windows() and hasattr(os, "getuid") and os.getuid() == 0:
            print(
                "-------------------------------------------------------------------")
            print("WARNING: You are installing KA-Lite as root user!")
            print(
                "\tInstalling as root may cause some permission problems while running")
            print("\tas a normal user in the future.")
            print(
                "-------------------------------------------------------------------")
            if options["interactive"]:
                if not raw_input_yn("Do you wish to continue and install it as root?"):
                    raise CommandError("Aborting script.\n")

        git_migrate_path = options["git_migrate_path"]

        if git_migrate_path:
            call_command("gitmigrate", path=git_migrate_path, interactive=options["interactive"])

        # Only SQLite installs have a database *file* we can inspect/move.
        database_kind = settings.DATABASES["default"]["ENGINE"]
        if "sqlite" in database_kind:
            database_file = settings.DATABASES["default"]["NAME"]
        else:
            database_file = None

        database_exists = database_file and os.path.isfile(database_file)

        # An empty file is created automatically even when the database doesn't
        # exist. But if it's empty, it's safe to overwrite.
        database_exists = database_exists and os.path.getsize(database_file) > 0

        install_clean = not database_exists

        if database_file:
            if not database_exists:
                install_clean = True
            else:
                # We found an existing database file.  By default,
                #   we will upgrade it; users really need to work hard
                #   to delete the file (but it's possible, which is nice).
                print(
                    "-------------------------------------------------------------------")
                print("WARNING: Database file already exists!")
                print(
                    "-------------------------------------------------------------------")
                # Keep (upgrade) the existing database unless the user
                # explicitly opts into removing it and confirms twice.
                if not options["interactive"] \
                   or raw_input_yn("Keep database file and upgrade to KA Lite version %s? " % VERSION) \
                   or not raw_input_yn("Remove database file '%s' now? " % database_file) \
                   or not raw_input_yn("WARNING: all data will be lost!  Are you sure? "):
                    install_clean = False
                    print("Upgrading database to KA Lite version %s" % VERSION)
                else:
                    install_clean = True
                    print("OK.  We will run a clean install; ")
                    # After all, don't delete--just move.
                    print(
                        "the database file will be moved to a deletable location.")

        if not install_clean and not database_file:
            # Make sure that, for non-sqlite installs, the database exists.
            raise Exception(
                "For databases not using SQLite, you must set up your database before running setup.")

        # Do all input at once, at the beginning
        if install_clean and options["interactive"]:
            if not options["username"] or not options["password"]:
                print(
                    "Please choose a username and password for the admin account on this device.")
                print(
                    "\tYou must remember this login information, as you will need")
                print(
                    "\tto enter it to administer this installation of KA Lite.")
            (username, password) = get_username_password(
                options["username"], options["password"])
            email = options["email"]
            (hostname, description) = get_hostname_and_description(
                options["hostname"], options["description"])
        else:
            # Non-interactive (or upgrade) path: fall back to settings
            # and generated defaults.
            username = options["username"] = (
                options["username"] or
                getattr(settings, "INSTALL_ADMIN_USERNAME", None) or
                get_clean_default_username()
            )
            password = options["password"] or getattr(settings, "INSTALL_ADMIN_PASSWORD", None)
            email = options["email"]  # default is non-empty
            hostname = options["hostname"]
            description = options["description"]

        if username and not validate_username(username):
            raise CommandError(
                "Username must contain only letters, digits, and underscores, and start with a letter.\n")

        ########################
        # Now do stuff
        ########################

        # Clean *pyc files if we are in a git repo
        if settings.IS_SOURCE:
            clean_pyc(settings.SOURCE_DIR)
        else:
            # Because we install dependencies as data_files, we run into problems,
            # namely that the pyc files are left dangling.
            distributed_packages = [
                os.path.join(kalite.ROOT_DATA_PATH, 'dist-packages'),
                os.path.join(kalite.ROOT_DATA_PATH, 'python-packages'),
            ]
            # Try locating django
            for dir_to_clean in distributed_packages:
                clean_pyc(dir_to_clean)

        # Move database file (if exists)
        if install_clean and database_file and os.path.exists(database_file):
            if not settings.DB_TEMPLATE_DEFAULT or database_file != settings.DB_TEMPLATE_DEFAULT:
                # This is an overwrite install; destroy the old db
                dest_file = tempfile.mkstemp()[1]
                print(
                    "(Re)moving database file to temp location, starting clean install. Recovery location: %s" % dest_file)
                shutil.move(database_file, dest_file)

        if settings.DB_TEMPLATE_DEFAULT and not database_exists and os.path.exists(settings.DB_TEMPLATE_DEFAULT):
            print("Copying database file from {0} to {1}".format(settings.DB_TEMPLATE_DEFAULT, settings.DEFAULT_DATABASE_PATH))
            shutil.copy(settings.DB_TEMPLATE_DEFAULT, settings.DEFAULT_DATABASE_PATH)
        else:
            print("Baking a fresh database from scratch or upgrading existing database.")
            call_command("syncdb", interactive=False, verbosity=options.get("verbosity"))
            call_command("migrate", merge=True, verbosity=options.get("verbosity"))
        Settings.set("database_version", VERSION)

        # Copy all content item db templates
        reset_content_db(force=install_clean)

        # download the english content pack
        # This can take a long time and lead to Travis stalling. None of this
        # is required for tests, and does not apply to the central server.
        if options.get("no-assessment-items", False):

            logging.warning("Skipping content pack downloading and configuration.")

        else:

            # Outdated location of assessment items - move assessment items from their
            # old location (CONTENT_ROOT/khan where they were mixed with other content
            # items)

            # TODO(benjaoming) for 0.15, remove the "move assessment items"
            # mechanism
            writable_assessment_items = os.access(KHAN_ASSESSMENT_ITEM_ROOT, os.W_OK)

            # Remove old assessment items
            if os.path.exists(OLD_ASSESSMENT_ITEMS_LOCATION) and os.access(OLD_ASSESSMENT_ITEMS_LOCATION, os.W_OK):
                logging.info("Deleting old assessment items")
                shutil.rmtree(OLD_ASSESSMENT_ITEMS_LOCATION)

            if options['force-assessment-item-dl']:  # user wants to force a new download; do it if we can, else error
                if writable_assessment_items:
                    call_command("retrievecontentpack", "download", "en")
                else:
                    raise RuntimeError("Got force-assessment-item-dl but directory not writable")
            elif english_content_pack_and_assessment_resources_are_current():
                logging.warning("English content pack is already up-to-date; skipping download and configuration.")
            elif not writable_assessment_items:  # skip if we're not going to be able to unpack it anyway
                logging.warning("Assessment item directory not writable; skipping content pack download.")
            elif settings.RUNNING_IN_TRAVIS:  # skip if we're running on Travis
                logging.warning("Running in Travis; skipping content pack download.")
            elif not options['interactive']:  # skip if we're not running in interactive mode (and it wasn't forced)
                logging.warning("Not running in interactive mode; skipping content pack download.")
            else:  # if we get this far, then we need to ask the user whether/how they want to get the content pack
                print(
                    "\nIn order to access many of the available exercises, you need to load a content pack for the latest version.")
                print(
                    "If you have an Internet connection, you can download the needed package. Warning: this may take a long time!")
                print(
                    "If you have already downloaded the content pack, you can specify the location of the file in the next step.")
                print("Otherwise, we will download it from {url}.".format(url=CONTENTPACK_URL))

                if raw_input_yn("Do you wish to download and install the content pack now?"):
                    ass_item_filename = CONTENTPACK_URL
                    retrieval_method = "download"
                elif raw_input_yn("Do you have a local copy of the content pack already downloaded that you want to install?"):
                    ass_item_filename = get_assessment_items_filename()
                    retrieval_method = "local"
                else:
                    ass_item_filename = None
                    retrieval_method = "local"

                if not ass_item_filename:
                    logging.warning(
                        "No content pack given. You will need to download and install it later.")
                else:
                    call_command("retrievecontentpack", retrieval_method, "en", ass_item_filename, foreground=True)

        # Individually generate any prerequisite models/state that is missing
        if not Settings.get("private_key"):
            call_command("generatekeys", verbosity=options.get("verbosity"))
        if not Device.objects.count():
            call_command(
                "initdevice", hostname, description, verbosity=options.get("verbosity"))
        if not Facility.objects.count():
            Facility.initialize_default_facility()

        # Create the admin user
        # blank password (non-interactive) means don't create a superuser
        if password:
            admin = get_object_or_None(User, username=username)
            if not admin:
                call_command("createsuperuser", username=username, email=email,
                             interactive=False, verbosity=options.get("verbosity"))
                admin = User.objects.get(username=username)
            admin.set_password(password)
            admin.save()

        # Now deploy the static files
        logging.info("Copying static media...")
        ensure_dir(settings.STATIC_ROOT)

        # The following file ignores have to be preserved from a
        # collectstatic(clear=True), due to being bundled with content packs,
        # and we thus have now way of getting them back.
        collectstatic_ignores = [
            "*.vtt", "*.srt",  # subtitle files come with language packs -- don't delete
            "*/perseus/ke/exercises/*",  # exercises come with language packs, and we have no way to replicate
        ]
        call_command("collectstatic", interactive=False, verbosity=0, ignore_patterns=collectstatic_ignores,
                     clear=True)
        call_command("collectstatic_js_reverse", interactive=False)

        # This is not possible in a distributed env
        if not settings.CENTRAL_SERVER:

            # Prefer the 'kalite' command on PATH; otherwise fall back to
            # the platform-specific start script bundled with the sources.
            kalite_executable = 'kalite'
            if not spawn.find_executable('kalite'):
                if os.name == 'posix':
                    start_script_path = os.path.realpath(
                        os.path.join(settings.PROJECT_PATH, "..", "bin", kalite_executable))
                else:
                    start_script_path = os.path.realpath(
                        os.path.join(settings.PROJECT_PATH, "..", "bin", "windows", "kalite.bat"))
            else:
                start_script_path = kalite_executable

            # Run annotate_content_items, on the distributed server.
            print("Annotating availability of all content, checking for content in this directory: (%s)" %
                  settings.CONTENT_ROOT)
            try:
                call_command("annotate_content_items")
            except OperationalError:
                pass

            # done; notify the user.
            print("\nCONGRATULATIONS! You've finished setting up the KA Lite server software.")
            print("You can now start KA Lite with the following command:\n\n\t%s start\n\n" % start_script_path)

            if options['interactive']:
                if raw_input_yn("Do you wish to start the server now?"):
                    print("Running {0} start".format(start_script_path))
                    p = subprocess.Popen([start_script_path, "start"], env=os.environ)
                    p.wait()
 def _find_adb(self):
     adb_loc = os.path.join(os.environ.get("ANDROID_SDK_ROOT", ""),
                            "platform-tools", "adb")
     if os.path.exists(adb_loc) and os.access(adb_loc, os.X_OK):
         return adb_loc
     return find_executable("adb")
Beispiel #33
0
def to_text(path, language='fra', **kwargs):
    """Wraps Tesseract 4 OCR with custom language model.

    Parameters
    ----------
    path : str
        path of electronic invoice in JPG or PNG format
    language : str
        Tesseract language code (default 'fra')

    Returns
    -------
    extracted_str : bytes
        returns extracted text from image in JPG or PNG format

    Raises
    ------
    EnvironmentError
        if tesseract, imagemagick (convert) or ghostscript (gs) is
        missing from the PATH
    """
    import subprocess
    from distutils import spawn
    import tempfile

    # Check for dependencies. Needs Tesseract and Imagemagick installed.
    if not spawn.find_executable('tesseract'):
        raise EnvironmentError('tesseract not installed.')
    if not spawn.find_executable('convert'):
        raise EnvironmentError('imagemagick not installed.')
    if not spawn.find_executable('gs'):
        raise EnvironmentError('ghostscript not installed.')

    with tempfile.NamedTemporaryFile(suffix='.tiff') as tf:
        # Step 1: Convert to TIFF
        gs_cmd = [
            'gs',
            '-q',
            '-dNOPAUSE',
            '-r600x600',
            '-sDEVICE=tiff24nc',
            '-sOutputFile=' + tf.name,
            path,
            '-c',
            'quit',
        ]
        # Block until ghostscript finishes (failing loudly on error),
        # instead of the previous fire-and-forget Popen + fixed 3-second
        # sleep, which raced with slow conversions and could hand an
        # incomplete TIFF to the next stage.
        subprocess.check_call(gs_cmd)

        # Step 2: Enhance TIFF
        magick_cmd = [
            'convert',
            tf.name,
            '-colorspace',
            'gray',
            '-type',
            'grayscale',
            '-contrast-stretch',
            '0',
            '-sharpen',
            '0x1',
            'tiff:-',
        ]

        p1 = subprocess.Popen(magick_cmd, stdout=subprocess.PIPE)

        # Step 3: OCR the enhanced TIFF streamed in on stdin.
        tess_cmd = [
            'tesseract', '-l', language, '--oem', '1', '--psm', '3', 'stdin',
            'stdout'
        ]
        p2 = subprocess.Popen(tess_cmd,
                              stdin=p1.stdout,
                              stdout=subprocess.PIPE)
        # Close our copy of p1's stdout so p1 receives SIGPIPE if p2
        # exits before consuming all of its output.
        p1.stdout.close()

        out, err = p2.communicate()

        extracted_str = out

        return extracted_str
Beispiel #34
0
    ./example.py

Author:
    Diego Caviedes Nozal - 23.10.2020
'''
import argparse
import acoustic_gps as agp
import numpy as np
from matplotlib import pyplot as plt
from helper import *
from matplotlib import rc
import os
from distutils.spawn import find_executable

# Use serif / Computer Modern fonts so figures match LaTeX documents.
rc('font', **{'family': 'serif', 'serif': ['Computer Modern Roman']})
# Enable TeX text rendering only when a latex binary is on the PATH.
if find_executable('latex'):
    print("latex installed")
    rc('text', usetex=True)

# TODO: Explain how Bivariate to Complex works in the code.
# TODO: Comment code properly
# TODO: Header and paper citing at the top (any CC license)?
# TODO: Reconstruction plot -> colorbars. axis labels
# TODO: Create folder if it doesn't exist

# Command-line interface: currently only exposes --help (showing the
# module docstring); no positional arguments are defined yet.
parser = argparse.ArgumentParser(description=__doc__,
                                 formatter_class=argparse.RawDescriptionHelpFormatter)
# parser.add_argument('foo', help="Name of file to process")
parser.parse_args()

Beispiel #35
0
def compile_cython_modules(profile=False,
                           coverage=False,
                           compile_minimal=False,
                           compile_more=False,
                           cython_with_refnanny=False):
    """Register Cython's own modules for compilation as extensions.

    Builds the module list (a minimal core, the default set, or an
    extended set with ``compile_more``), optionally regenerates the
    formal-grammar parser when CPython's ``pgen`` tool is available,
    and installs the resulting ``Extension`` objects plus Cython's
    ``build_ext`` command into the module-level ``setup_args``.

    ``profile``/``coverage``/``cython_with_refnanny`` toggle the
    corresponding compiler directives and C defines.
    """
    source_root = os.path.abspath(os.path.dirname(__file__))
    compiled_modules = [
        "Cython.Plex.Actions",
        "Cython.Plex.Scanners",
        "Cython.Compiler.FlowControl",
        "Cython.Compiler.Scanning",
        "Cython.Compiler.Visitor",
        "Cython.Runtime.refnanny",
    ]
    if not compile_minimal:
        compiled_modules.extend([
            "Cython.Plex.Machines",
            "Cython.Plex.Transitions",
            "Cython.Plex.DFA",
            "Cython.Compiler.FusedNode",
            "Cython.Tempita._tempita",
            "Cython.StringIOTree",
            "Cython.Utils",
        ])
    if compile_more and not compile_minimal:
        compiled_modules.extend([
            "Cython.Compiler.Code",
            "Cython.Compiler.Lexicon",
            "Cython.Compiler.Parsing",
            "Cython.Compiler.Pythran",
            "Cython.Build.Dependencies",
            "Cython.Compiler.ParseTreeTransforms",
            "Cython.Compiler.Nodes",
            "Cython.Compiler.ExprNodes",
            "Cython.Compiler.ModuleNode",
            "Cython.Compiler.Optimize",
        ])

    # Look for CPython's ``pgen`` next to the interpreter headers; it is
    # used below to regenerate graminit.h/.c from the Grammar file.
    from distutils.spawn import find_executable
    from distutils.sysconfig import get_python_inc
    pgen = find_executable(
        'pgen',
        os.pathsep.join([
            os.environ['PATH'],
            os.path.join(get_python_inc(), '..', 'Parser')
        ]))
    if not pgen:
        sys.stderr.write(
            "Unable to find pgen, not compiling formal grammar.\n")
    else:
        parser_dir = os.path.join(os.path.dirname(__file__), 'Cython',
                                  'Parser')
        grammar = os.path.join(parser_dir, 'Grammar')
        subprocess.check_call([
            pgen,
            os.path.join(grammar),
            os.path.join(parser_dir, 'graminit.h'),
            os.path.join(parser_dir, 'graminit.c'),
        ])
        cst_pyx = os.path.join(parser_dir, 'ConcreteSyntaxTree.pyx')
        # Align the .pyx mtime with a newer Grammar file so the module
        # below is rebuilt consistently.
        if os.stat(grammar)[stat.ST_MTIME] > os.stat(cst_pyx)[stat.ST_MTIME]:
            mtime = os.stat(grammar)[stat.ST_MTIME]
            os.utime(cst_pyx, (mtime, mtime))
        compiled_modules.extend([
            "Cython.Parser.ConcreteSyntaxTree",
        ])

    defines = []
    if cython_with_refnanny:
        defines.append(('CYTHON_REFNANNY', '1'))
    if coverage:
        defines.append(('CYTHON_TRACE', '1'))

    extensions = []
    for module in compiled_modules:
        source_file = os.path.join(source_root, *module.split('.'))
        pyx_source_file = source_file + ".py"
        if not os.path.exists(pyx_source_file):
            pyx_source_file += "x"  # .py -> .pyx

        dep_files = []
        if os.path.exists(source_file + '.pxd'):
            dep_files.append(source_file + '.pxd')

        extensions.append(
            Extension(
                module,
                sources=[pyx_source_file],
                # refnanny itself must not be compiled with CYTHON_REFNANNY.
                define_macros=defines if '.refnanny' not in module else [],
                depends=dep_files))
        # XXX hack around setuptools quirk for '*.pyx' sources
        extensions[-1].sources[0] = pyx_source_file

    # optimise build parallelism by starting with the largest modules
    extensions.sort(key=lambda ext: os.path.getsize(ext.sources[0]),
                    reverse=True)

    from Cython.Distutils.build_ext import build_ext
    from Cython.Compiler.Options import get_directive_defaults
    get_directive_defaults().update(
        language_level=2,
        binding=False,
        always_allow_keywords=False,
        autotestdict=False,
    )
    if profile:
        get_directive_defaults()['profile'] = True
        sys.stderr.write("Enabled profiling for the Cython binary modules\n")
    if coverage:
        get_directive_defaults()['linetrace'] = True
        sys.stderr.write(
            "Enabled line tracing and profiling for the Cython binary modules\n"
        )

    # not using cythonize() directly to let distutils decide whether building extensions was requested
    add_command_class("build_ext", build_ext)
    setup_args['ext_modules'] = extensions
Beispiel #36
0
import subprocess
# NOTE(review): presumably validates the docstring formatting of the
# mol2 module at import time -- confirm against DocStringFormatTester.
doc = DocStringFormatTester(mol2)


def test_load_mol2():
    """Loading a mol2 file must match the reference PDB structure."""
    mol2_traj = md.load(get_fn('imatinib.mol2'))
    pdb_traj = md.load(get_fn('imatinib.pdb'))

    # Coordinates must agree between the two formats.
    eq(mol2_traj.xyz, pdb_traj.xyz)

    # Bond tables must agree as well.
    expected_top, expected_bonds = pdb_traj.top.to_dataframe()
    actual_top, actual_bonds = mol2_traj.top.to_dataframe()
    eq(actual_bonds, expected_bonds)


@skipif(
    find_executable('obabel') is None,
    'You need obabel installed to run this test')
@skipif(
    os.environ.get("TRAVIS", None) == 'true',
    "Skip testing of entire FreeSolv database on Travis.")
def test_load_freesolv_gaffmol2_vs_sybylmol2_vs_obabelpdb():
    with enter_temp_directory():

        tar_filename = "freesolve_v0.3.tar.bz2"
        tar = tarfile.open(get_fn(tar_filename), mode="r:bz2")
        tar.extractall()
        tar.close()

        database = pickle.load(open("./v0.3/database.pickle"))

        for key in database:
Beispiel #37
0
class TestShutil(unittest.TestCase):
    """Tests for the shutil module (rmtree, copytree, copy*, archives)."""

    def setUp(self) -> None:
        """Start each test with an empty list of temp dirs to clean up."""
        super().setUp()
        # Use a plain list literal: `List[str]()` instantiated the typing
        # alias at runtime, which is deprecated and fragile across versions.
        self.tempdirs = []  # type: List[str]

    def tearDown(self) -> None:
        """Remove every temporary directory registered during the test."""
        super().tearDown()
        while self.tempdirs:
            d = self.tempdirs.pop()
            # Second (positional) argument is ignore_errors: suppress
            # removal failures on Windows/Cygwin only.
            shutil.rmtree(d, os.name in ('nt', 'cygwin'))

    def write_file(self, path, content: str = 'xxx') -> None:
        """Write *content* to the file at *path*.

        path may be a string or a sequence of path components; a sequence
        is joined with os.path.join first (work around mypy issue #238).

        BUGFIX: the original used two @overload-decorated *implementations*.
        Under the standard typing module, @overload bodies are never
        callable (they raise NotImplementedError) and the second definition
        silently replaced the first.  A single runtime dispatch keeps both
        documented call shapes working.
        """
        if not isinstance(path, str):
            path = os.path.join(*list(path))
        f = open(path, 'w')
        try:
            f.write(content)
        finally:
            f.close()

    def mkdtemp(self) -> str:
        """Create a temporary directory that will be cleaned up.

        Returns the path of the directory.
        """
        d = tempfile.mkdtemp()
        # Registered so tearDown removes it automatically.
        self.tempdirs.append(d)
        return d

    def test_rmtree_errors(self) -> None:
        """rmtree on a nonexistent path must raise OSError."""
        # filename is guaranteed not to exist
        filename = tempfile.mktemp()
        self.assertRaises(OSError, shutil.rmtree, filename)

    # See bug #1071513 for why we don't run this on cygwin
    # and bug #1076467 for why we don't run this as root.
    if (hasattr(os, 'chmod') and sys.platform[:6] != 'cygwin'
            and not (hasattr(os, 'geteuid') and os.geteuid() == 0)):

        def test_on_error(self) -> None:
            """rmtree must route failures through the onerror callback."""
            self.errorState = 0
            os.mkdir(TESTFN)
            self.childpath = os.path.join(TESTFN, 'a')
            f = open(self.childpath, 'w')
            f.close()
            old_dir_mode = os.stat(TESTFN).st_mode
            old_child_mode = os.stat(self.childpath).st_mode
            # Make unwritable.
            os.chmod(self.childpath, stat.S_IREAD)
            os.chmod(TESTFN, stat.S_IREAD)

            shutil.rmtree(TESTFN, onerror=self.check_args_to_onerror)
            # Test whether onerror has actually been called.
            self.assertEqual(
                self.errorState, 2,
                "Expected call to onerror function did not happen.")

            # Make writable again.
            os.chmod(TESTFN, old_dir_mode)
            os.chmod(self.childpath, old_child_mode)

            # Clean up.
            shutil.rmtree(TESTFN)

    def check_args_to_onerror(
            self, func: Function[[str], Any], arg: str,
            exc: Tuple[type, BaseException, Traceback]) -> None:
        """onerror callback used by test_on_error.

        Advances self.errorState (0 -> 1 -> 2) as rmtree reports the two
        expected failures, validating the (func, arg, exc_info) arguments.
        """
        # test_rmtree_errors deliberately runs rmtree
        # on a directory that is chmod 400, which will fail.
        # This function is run when shutil.rmtree fails.
        # 99.9% of the time it initially fails to remove
        # a file in the directory, so the first time through
        # func is os.remove.
        # However, some Linux machines running ZFS on
        # FUSE experienced a failure earlier in the process
        # at os.listdir.  The first failure may legally
        # be either.
        if self.errorState == 0:
            if func is os.remove:
                self.assertEqual(arg, self.childpath)
            else:
                self.assertIs(func, os.listdir,
                              "func must be either os.remove or os.listdir")
                self.assertEqual(arg, TESTFN)
            self.assertTrue(issubclass(exc[0], OSError))
            self.errorState = 1
        else:
            # Second failure: removing the (now empty) top directory.
            self.assertEqual(func, os.rmdir)
            self.assertEqual(arg, TESTFN)
            self.assertTrue(issubclass(exc[0], OSError))
            self.errorState = 2

    def test_rmtree_dont_delete_file(self) -> None:
        """rmtree on a plain file must raise and leave the file in place."""
        # When called on a file instead of a directory, don't delete it.
        handle, path = tempfile.mkstemp()
        os.fdopen(handle).close()
        self.assertRaises(OSError, shutil.rmtree, path)
        os.remove(path)

    def _write_data(self, path: str, data: str) -> None:
        """Helper: write *data* to a new file at *path*."""
        f = open(path, "w")
        f.write(data)
        f.close()

    def test_copytree_simple(self) -> None:
        """copytree must reproduce files and nested directories."""
        def read_data(path: str) -> str:
            f = open(path)
            data = f.read()
            f.close()
            return data

        src_dir = tempfile.mkdtemp()
        dst_dir = os.path.join(tempfile.mkdtemp(), 'destination')
        self._write_data(os.path.join(src_dir, 'test.txt'), '123')
        os.mkdir(os.path.join(src_dir, 'test_dir'))
        self._write_data(os.path.join(src_dir, 'test_dir', 'test.txt'), '456')

        try:
            shutil.copytree(src_dir, dst_dir)
            self.assertTrue(os.path.isfile(os.path.join(dst_dir, 'test.txt')))
            self.assertTrue(os.path.isdir(os.path.join(dst_dir, 'test_dir')))
            self.assertTrue(
                os.path.isfile(os.path.join(dst_dir, 'test_dir', 'test.txt')))
            actual = read_data(os.path.join(dst_dir, 'test.txt'))
            self.assertEqual(actual, '123')
            actual = read_data(os.path.join(dst_dir, 'test_dir', 'test.txt'))
            self.assertEqual(actual, '456')
        finally:
            # These dirs are created directly (not via self.mkdtemp), so
            # clean them up by hand.
            for path in (
                    os.path.join(src_dir, 'test.txt'),
                    os.path.join(dst_dir, 'test.txt'),
                    os.path.join(src_dir, 'test_dir', 'test.txt'),
                    os.path.join(dst_dir, 'test_dir', 'test.txt'),
            ):
                if os.path.exists(path):
                    os.remove(path)
            for path in (src_dir, os.path.dirname(dst_dir)):
                if os.path.exists(path):
                    shutil.rmtree(path)

    def test_copytree_with_exclude(self) -> None:
        """copytree must honour glob-style and callable ignore arguments."""
        def read_data(path: str) -> str:
            f = open(path)
            data = f.read()
            f.close()
            return data

        # creating data
        join = os.path.join
        exists = os.path.exists
        src_dir = tempfile.mkdtemp()
        try:
            dst_dir = join(tempfile.mkdtemp(), 'destination')
            self._write_data(join(src_dir, 'test.txt'), '123')
            self._write_data(join(src_dir, 'test.tmp'), '123')
            os.mkdir(join(src_dir, 'test_dir'))
            self._write_data(join(src_dir, 'test_dir', 'test.txt'), '456')
            os.mkdir(join(src_dir, 'test_dir2'))
            self._write_data(join(src_dir, 'test_dir2', 'test.txt'), '456')
            os.mkdir(join(src_dir, 'test_dir2', 'subdir'))
            os.mkdir(join(src_dir, 'test_dir2', 'subdir2'))
            self._write_data(join(src_dir, 'test_dir2', 'subdir', 'test.txt'),
                             '456')
            self._write_data(join(src_dir, 'test_dir2', 'subdir2', 'test.py'),
                             '456')

            # testing glob-like patterns
            try:
                patterns = shutil.ignore_patterns('*.tmp', 'test_dir2')
                shutil.copytree(src_dir, dst_dir, ignore=patterns)
                # checking the result: some elements should not be copied
                self.assertTrue(exists(join(dst_dir, 'test.txt')))
                self.assertTrue(not exists(join(dst_dir, 'test.tmp')))
                self.assertTrue(not exists(join(dst_dir, 'test_dir2')))
            finally:
                if os.path.exists(dst_dir):
                    shutil.rmtree(dst_dir)
            try:
                patterns = shutil.ignore_patterns('*.tmp', 'subdir*')
                shutil.copytree(src_dir, dst_dir, ignore=patterns)
                # checking the result: some elements should not be copied
                self.assertTrue(not exists(join(dst_dir, 'test.tmp')))
                self.assertTrue(
                    not exists(join(dst_dir, 'test_dir2', 'subdir2')))
                self.assertTrue(
                    not exists(join(dst_dir, 'test_dir2', 'subdir')))
            finally:
                if os.path.exists(dst_dir):
                    shutil.rmtree(dst_dir)

            # testing callable-style
            try:

                def _filter(src: str, names: Sequence[str]) -> List[str]:
                    res = []  # type: List[str]
                    for name in names:
                        path = os.path.join(src, name)

                        # BUGFIX: was `path.split()[-1] == 'subdir'` --
                        # str.split() splits on whitespace, so the full path
                        # was compared to 'subdir' and never matched; the
                        # 'subdir' directory was therefore copied and the
                        # assertion below could not pass.
                        if (os.path.isdir(path)
                                and os.path.basename(path) == 'subdir'):
                            res.append(name)
                        # BUGFIX: was `in ('.py')` -- parenthesised string,
                        # i.e. a substring test, not a one-element tuple.
                        elif os.path.splitext(path)[-1] in ('.py',):
                            res.append(name)
                    return res

                shutil.copytree(src_dir, dst_dir, ignore=_filter)

                # checking the result: some elements should not be copied
                self.assertTrue(not exists(
                    join(dst_dir, 'test_dir2', 'subdir2', 'test.py')))
                self.assertTrue(
                    not exists(join(dst_dir, 'test_dir2', 'subdir')))

            finally:
                if os.path.exists(dst_dir):
                    shutil.rmtree(dst_dir)
        finally:
            shutil.rmtree(src_dir)
            shutil.rmtree(os.path.dirname(dst_dir))

    @unittest.skipUnless(hasattr(os, 'link'), 'requires os.link')
    def test_dont_copy_file_onto_link_to_itself(self) -> None:
        """copyfile onto a hard link of the source must fail, not clobber."""
        # Temporarily disable test on Windows.
        if os.name == 'nt':
            return
        # bug 851123.
        os.mkdir(TESTFN)
        src = os.path.join(TESTFN, 'cheese')
        dst = os.path.join(TESTFN, 'shop')
        try:
            with open(src, 'w') as f:
                f.write('cheddar')
            os.link(src, dst)
            self.assertRaises(shutil.Error, shutil.copyfile, src, dst)
            with open(src, 'r') as f:
                self.assertEqual(f.read(), 'cheddar')
            os.remove(dst)
        finally:
            shutil.rmtree(TESTFN, ignore_errors=True)

    @support.skip_unless_symlink
    def test_dont_copy_file_onto_symlink_to_itself(self) -> None:
        """copyfile onto a symlink to the source must fail, not clobber."""
        # bug 851123.
        os.mkdir(TESTFN)
        src = os.path.join(TESTFN, 'cheese')
        dst = os.path.join(TESTFN, 'shop')
        try:
            with open(src, 'w') as f:
                f.write('cheddar')
            # Using `src` here would mean we end up with a symlink pointing
            # to TESTFN/TESTFN/cheese, while it should point at
            # TESTFN/cheese.
            os.symlink('cheese', dst)
            self.assertRaises(shutil.Error, shutil.copyfile, src, dst)
            with open(src, 'r') as f:
                self.assertEqual(f.read(), 'cheddar')
            os.remove(dst)
        finally:
            shutil.rmtree(TESTFN, ignore_errors=True)

    @support.skip_unless_symlink
    def test_rmtree_on_symlink(self) -> None:
        """rmtree must refuse to operate on a symlink to a directory."""
        # bug 1669.
        os.mkdir(TESTFN)
        try:
            src = os.path.join(TESTFN, 'cheese')
            dst = os.path.join(TESTFN, 'shop')
            os.mkdir(src)
            os.symlink(src, dst)
            self.assertRaises(OSError, shutil.rmtree, dst)
        finally:
            shutil.rmtree(TESTFN, ignore_errors=True)

    if hasattr(os, "mkfifo"):
        # Issue #3002: copyfile and copytree block indefinitely on named pipes
        def test_copyfile_named_pipe(self) -> None:
            """copyfile must reject named pipes instead of blocking."""
            os.mkfifo(TESTFN)
            try:
                # Pipe as source and pipe as destination must both fail.
                self.assertRaises(shutil.SpecialFileError, shutil.copyfile,
                                  TESTFN, TESTFN2)
                self.assertRaises(shutil.SpecialFileError, shutil.copyfile,
                                  __file__, TESTFN)
            finally:
                os.remove(TESTFN)

        @support.skip_unless_symlink
        def test_copytree_named_pipe(self) -> None:
            """copytree must collect named-pipe failures into shutil.Error."""
            os.mkdir(TESTFN)
            try:
                subdir = os.path.join(TESTFN, "subdir")
                os.mkdir(subdir)
                pipe = os.path.join(subdir, "mypipe")
                os.mkfifo(pipe)
                try:
                    shutil.copytree(TESTFN, TESTFN2)
                except shutil.Error as e:
                    # Error.args[0] is a list of (src, dst, message) tuples.
                    errors = e.args[0]
                    self.assertEqual(len(errors), 1)
                    src, dst, error_msg = errors[0]
                    self.assertEqual("`%s` is a named pipe" % pipe, error_msg)
                else:
                    self.fail("shutil.Error should have been raised")
            finally:
                shutil.rmtree(TESTFN, ignore_errors=True)
                shutil.rmtree(TESTFN2, ignore_errors=True)

    def test_copytree_special_func(self) -> None:
        """copytree must route every file copy through copy_function."""
        src_dir = self.mkdtemp()
        dst_dir = os.path.join(self.mkdtemp(), 'destination')
        self._write_data(os.path.join(src_dir, 'test.txt'), '123')
        os.mkdir(os.path.join(src_dir, 'test_dir'))
        self._write_data(os.path.join(src_dir, 'test_dir', 'test.txt'), '456')

        # Plain list literal: instantiating the typing alias
        # List[Tuple[str, str]]() at runtime is deprecated and fragile.
        copied = []  # type: List[Tuple[str, str]]

        def _copy(src: str, dst: str) -> None:
            copied.append((src, dst))

        shutil.copytree(src_dir, dst_dir, copy_function=_copy)
        # Two regular files in the tree, so exactly two recorded copies.
        self.assertEqual(len(copied), 2)

    @support.skip_unless_symlink
    def test_copytree_dangling_symlinks(self) -> None:
        """Dangling symlinks: error by default, skippable, or copyable."""
        # a dangling symlink raises an error at the end
        src_dir = self.mkdtemp()
        dst_dir = os.path.join(self.mkdtemp(), 'destination')
        os.symlink('IDONTEXIST', os.path.join(src_dir, 'test.txt'))
        os.mkdir(os.path.join(src_dir, 'test_dir'))
        self._write_data(os.path.join(src_dir, 'test_dir', 'test.txt'), '456')
        # NOTE(review): bare `Error` -- presumably shutil.Error imported at
        # module level; verify against the file's imports.
        self.assertRaises(Error, shutil.copytree, src_dir, dst_dir)

        # a dangling symlink is ignored with the proper flag
        dst_dir = os.path.join(self.mkdtemp(), 'destination2')
        shutil.copytree(src_dir, dst_dir, ignore_dangling_symlinks=True)
        self.assertNotIn('test.txt', os.listdir(dst_dir))

        # a dangling symlink is copied if symlinks=True
        dst_dir = os.path.join(self.mkdtemp(), 'destination3')
        shutil.copytree(src_dir, dst_dir, symlinks=True)
        self.assertIn('test.txt', os.listdir(dst_dir))

    def _copy_file(self, method: Function[[str, str],
                                          None]) -> Tuple[str, str]:
        """Copy a fresh temp file using *method*; return (source, dest)."""
        fname = 'test.txt'
        tmpdir = self.mkdtemp()
        self.write_file([tmpdir, fname])
        file1 = os.path.join(tmpdir, fname)
        tmpdir2 = self.mkdtemp()
        method(file1, tmpdir2)
        file2 = os.path.join(tmpdir2, fname)
        return (file1, file2)

    @unittest.skipUnless(hasattr(os, 'chmod'), 'requires os.chmod')
    def test_copy(self) -> None:
        # Ensure that the copied file exists and has the same mode bits.
        file1, file2 = self._copy_file(shutil.copy)
        self.assertTrue(os.path.exists(file2))
        self.assertEqual(os.stat(file1).st_mode, os.stat(file2).st_mode)

    @unittest.skipUnless(hasattr(os, 'chmod'), 'requires os.chmod')
    @unittest.skipUnless(hasattr(os, 'utime'), 'requires os.utime')
    def test_copy2(self) -> None:
        # Ensure that the copied file exists and has the same mode and
        # modification time bits.
        file1, file2 = self._copy_file(shutil.copy2)
        self.assertTrue(os.path.exists(file2))
        file1_stat = os.stat(file1)
        file2_stat = os.stat(file2)
        self.assertEqual(file1_stat.st_mode, file2_stat.st_mode)
        for attr in 'st_atime', 'st_mtime':
            # The modification times may be truncated in the new file.
            self.assertLessEqual(getattr(file1_stat, attr),
                                 getattr(file2_stat, attr) + 1)
        if hasattr(os, 'chflags') and hasattr(file1_stat, 'st_flags'):
            self.assertEqual(getattr(file1_stat, 'st_flags'),
                             getattr(file2_stat, 'st_flags'))

    @unittest.skipUnless(zlib, "requires zlib")
    def test_make_tarball(self) -> None:
        # creating something to tar
        tmpdir = self.mkdtemp()
        self.write_file([tmpdir, 'file1'], 'xxx')
        self.write_file([tmpdir, 'file2'], 'xxx')
        os.mkdir(os.path.join(tmpdir, 'sub'))
        self.write_file([tmpdir, 'sub', 'file3'], 'xxx')

        tmpdir2 = self.mkdtemp()
        # force shutil to create the directory
        os.rmdir(tmpdir2)
        unittest.skipUnless(
            splitdrive(tmpdir)[0] == splitdrive(tmpdir2)[0],
            "source and target should be on same drive")

        base_name = os.path.join(tmpdir2, 'archive')

        # working with relative paths to avoid tar warnings
        old_dir = os.getcwd()
        os.chdir(tmpdir)
        try:
            _make_tarball(splitdrive(base_name)[1], '.')
        finally:
            os.chdir(old_dir)

        # check if the compressed tarball was created
        tarball = base_name + '.tar.gz'
        self.assertTrue(os.path.exists(tarball))

        # trying an uncompressed one
        base_name = os.path.join(tmpdir2, 'archive')
        old_dir = os.getcwd()
        os.chdir(tmpdir)
        try:
            _make_tarball(splitdrive(base_name)[1], '.', compress=None)
        finally:
            os.chdir(old_dir)
        tarball = base_name + '.tar'
        self.assertTrue(os.path.exists(tarball))

    def _tarinfo(self, path: str) -> tuple:
        """Return the sorted member names of the tar archive at *path*."""
        tar = tarfile.open(path)
        try:
            names = tar.getnames()
            names.sort()
            return tuple(names)
        finally:
            tar.close()

    def _create_files(self) -> Tuple[str, str, str]:
        """Build a small 'dist' tree; return (tmpdir, tmpdir2, base_name)."""
        # creating something to tar
        tmpdir = self.mkdtemp()
        dist = os.path.join(tmpdir, 'dist')
        os.mkdir(dist)
        self.write_file([dist, 'file1'], 'xxx')
        self.write_file([dist, 'file2'], 'xxx')
        os.mkdir(os.path.join(dist, 'sub'))
        self.write_file([dist, 'sub', 'file3'], 'xxx')
        os.mkdir(os.path.join(dist, 'sub2'))
        tmpdir2 = self.mkdtemp()
        base_name = os.path.join(tmpdir2, 'archive')
        return tmpdir, tmpdir2, base_name

    @unittest.skipUnless(zlib, "Requires zlib")
    @unittest.skipUnless(
        find_executable('tar') and find_executable('gzip'),
        'Need the tar command to run')
    def test_tarfile_vs_tar(self) -> None:
        tmpdir, tmpdir2, base_name = self._create_files()
        old_dir = os.getcwd()
        os.chdir(tmpdir)
        try:
            _make_tarball(base_name, 'dist')
        finally:
            os.chdir(old_dir)

        # check if the compressed tarball was created
        tarball = base_name + '.tar.gz'
        self.assertTrue(os.path.exists(tarball))

        # now create another tarball using `tar`
        tarball2 = os.path.join(tmpdir, 'archive2.tar.gz')
        tar_cmd = ['tar', '-cf', 'archive2.tar', 'dist']
        gzip_cmd = ['gzip', '-f9', 'archive2.tar']
        old_dir = os.getcwd()
        os.chdir(tmpdir)
        try:
            with captured_stdout() as s:
                spawn(tar_cmd)
                spawn(gzip_cmd)
        finally:
            os.chdir(old_dir)

        self.assertTrue(os.path.exists(tarball2))
        # let's compare both tarballs
        self.assertEqual(self._tarinfo(tarball), self._tarinfo(tarball2))

        # trying an uncompressed one
        base_name = os.path.join(tmpdir2, 'archive')
        old_dir = os.getcwd()
        os.chdir(tmpdir)
        try:
            _make_tarball(base_name, 'dist', compress=None)
        finally:
            os.chdir(old_dir)
        tarball = base_name + '.tar'
        self.assertTrue(os.path.exists(tarball))

        # now for a dry_run
        base_name = os.path.join(tmpdir2, 'archive')
        old_dir = os.getcwd()
        os.chdir(tmpdir)
        try:
            _make_tarball(base_name, 'dist', compress=None, dry_run=True)
        finally:
            os.chdir(old_dir)
        tarball = base_name + '.tar'
        self.assertTrue(os.path.exists(tarball))

    @unittest.skipUnless(zlib, "Requires zlib")
    @unittest.skipUnless(ZIP_SUPPORT, 'Need zip support to run')
    def test_make_zipfile(self) -> None:
        # creating something to tar
        tmpdir = self.mkdtemp()
        self.write_file([tmpdir, 'file1'], 'xxx')
        self.write_file([tmpdir, 'file2'], 'xxx')

        tmpdir2 = self.mkdtemp()
        # force shutil to create the directory
        os.rmdir(tmpdir2)
        base_name = os.path.join(tmpdir2, 'archive')
        _make_zipfile(base_name, tmpdir)

        # check if the compressed tarball was created
        tarball = base_name + '.zip'
        self.assertTrue(os.path.exists(tarball))

    def test_make_archive(self) -> None:
        tmpdir = self.mkdtemp()
        base_name = os.path.join(tmpdir, 'archive')
        self.assertRaises(ValueError, make_archive, base_name, 'xxx')

    @unittest.skipUnless(zlib, "Requires zlib")
    def test_make_archive_owner_group(self) -> None:
        # testing make_archive with owner and group, with various combinations
        # this works even if there's not gid/uid support
        if UID_GID_SUPPORT:
            group = grp.getgrgid(0).gr_name
            owner = pwd.getpwuid(0).pw_name
        else:
            group = owner = 'root'

        base_dir, root_dir, base_name = self._create_files()
        base_name = os.path.join(self.mkdtemp(), 'archive')
        res = make_archive(base_name,
                           'zip',
                           root_dir,
                           base_dir,
                           owner=owner,
                           group=group)
        self.assertTrue(os.path.exists(res))

        res = make_archive(base_name, 'zip', root_dir, base_dir)
        self.assertTrue(os.path.exists(res))

        res = make_archive(base_name,
                           'tar',
                           root_dir,
                           base_dir,
                           owner=owner,
                           group=group)
        self.assertTrue(os.path.exists(res))

        res = make_archive(base_name,
                           'tar',
                           root_dir,
                           base_dir,
                           owner='kjhkjhkjg',
                           group='oihohoh')
        self.assertTrue(os.path.exists(res))

    @unittest.skipUnless(zlib, "Requires zlib")
    @unittest.skipUnless(UID_GID_SUPPORT, "Requires grp and pwd support")
    def test_tarfile_root_owner(self) -> None:
        tmpdir, tmpdir2, base_name = self._create_files()
        old_dir = os.getcwd()
        os.chdir(tmpdir)
        group = grp.getgrgid(0).gr_name
        owner = pwd.getpwuid(0).pw_name
        try:
            archive_name = _make_tarball(base_name,
                                         'dist',
                                         compress=None,
                                         owner=owner,
                                         group=group)
        finally:
            os.chdir(old_dir)

        # check if the compressed tarball was created
        self.assertTrue(os.path.exists(archive_name))

        # now checks the rights
        archive = tarfile.open(archive_name)
        try:
            for member in archive.getmembers():
                self.assertEqual(member.uid, 0)
                self.assertEqual(member.gid, 0)
        finally:
            archive.close()

    def test_make_archive_cwd(self) -> None:
        current_dir = os.getcwd()

        def _breaks(*args: Any, **kw: Any) -> None:
            raise RuntimeError()

        register_archive_format('xxx', _breaks, [], 'xxx file')
        try:
            try:
                make_archive('xxx', 'xxx', root_dir=self.mkdtemp())
            except Exception:
                pass
            self.assertEqual(os.getcwd(), current_dir)
        finally:
            unregister_archive_format('xxx')

    def test_register_archive_format(self) -> None:
        """register_archive_format must validate args and (un)register."""
        # Non-callable function and malformed extra-args must be rejected.
        self.assertRaises(TypeError, register_archive_format, 'xxx', 1)
        self.assertRaises(TypeError, register_archive_format, 'xxx',
                          lambda: 1 / 0, 1)
        self.assertRaises(TypeError, register_archive_format, 'xxx',
                          lambda: 1 / 0, [(1, 2), (1, 2, 3)])

        register_archive_format('xxx', lambda: 1 / 0, [('x', 2)], 'xxx file')
        formats = [name for name, params in get_archive_formats()]
        self.assertIn('xxx', formats)

        unregister_archive_format('xxx')
        formats = [name for name, params in get_archive_formats()]
        self.assertNotIn('xxx', formats)

    def _compare_dirs(self, dir1: str, dir2: str) -> List[str]:
        # check that dir1 and dir2 are equivalent,
        # return the diff
        diff = List[str]()
        for root, dirs, files in os.walk(dir1):
            for file_ in files:
                path = os.path.join(root, file_)
                target_path = os.path.join(dir2, os.path.split(path)[-1])
                if not os.path.exists(target_path):
                    diff.append(file_)
        return diff

    @unittest.skipUnless(zlib, "Requires zlib")
    def test_unpack_archive(self) -> None:
        formats = ['tar', 'gztar', 'zip']
        if BZ2_SUPPORTED:
            formats.append('bztar')

        for format in formats:
            tmpdir = self.mkdtemp()
            base_dir, root_dir, base_name = self._create_files()
            tmpdir2 = self.mkdtemp()
            filename = make_archive(base_name, format, root_dir, base_dir)

            # let's try to unpack it now
            unpack_archive(filename, tmpdir2)
            diff = self._compare_dirs(tmpdir, tmpdir2)
            self.assertEqual(diff, [])

            # and again, this time with the format specified
            tmpdir3 = self.mkdtemp()
            unpack_archive(filename, tmpdir3, format=format)
            diff = self._compare_dirs(tmpdir, tmpdir3)
            self.assertEqual(diff, [])
        self.assertRaises(shutil.ReadError, unpack_archive, TESTFN)
        self.assertRaises(ValueError, unpack_archive, TESTFN, format='xxx')

    def test_unpack_registery(self) -> None:

        formats = get_unpack_formats()

        def _boo(filename: str, extract_dir: str, extra: int) -> None:
            self.assertEqual(extra, 1)
            self.assertEqual(filename, 'stuff.boo')
            self.assertEqual(extract_dir, 'xx')

        register_unpack_format('Boo', ['.boo', '.b2'], _boo, [('extra', 1)])
        unpack_archive('stuff.boo', 'xx')

        # trying to register a .boo unpacker again
        self.assertRaises(RegistryError, register_unpack_format, 'Boo2',
                          ['.boo'], _boo)

        # should work now
        unregister_unpack_format('Boo')
        register_unpack_format('Boo2', ['.boo'], _boo)
        self.assertIn(('Boo2', ['.boo'], ''), get_unpack_formats())
        self.assertNotIn(('Boo', ['.boo'], ''), get_unpack_formats())

        # let's leave a clean state
        unregister_unpack_format('Boo2')
        self.assertEqual(get_unpack_formats(), formats)
Beispiel #38
0
import argparse
from PIL import Image
import os
import sys
import math
import ast
from distutils.spawn import find_executable
import subprocess

# Allow large images (this could lead to a denial-of-service attack if you're
# running this script on user-submitted images.)
Image.MAX_IMAGE_PIXELS = None

# Find external programs
try:
    # May return None if nona is not on the PATH.
    nona = find_executable('nona')
except KeyError:
    # Handle case of PATH not being set
    # (older distutils read os.environ['PATH'] directly).
    nona = None

# Subclass the parser to explain the semi-optional --nona flag on errors.
class GenParser(argparse.ArgumentParser):
    """ArgumentParser that explains why --nona may be required."""

    def error(self, message):
        # If the failure involves --nona, tell the user why it became
        # mandatory (nona was not found on the PATH) before the standard
        # argparse error handling prints usage and exits.
        mentions_nona = '--nona' in message
        if mentions_nona:
            sys.stderr.write('''IMPORTANT: The location of nona must be specified with -n, since it was not
           found on the PATH!\n\n''')
        super(GenParser, self).error(message)

# Parse input
# GenParser (rather than a plain ArgumentParser) is used so that errors
# mentioning --nona get the extra PATH explanation.
parser = GenParser(description='Generate a Pannellum multires tile set from a full or partial equirectangular or cylindrical panorama.',
                   formatter_class=argparse.ArgumentDefaultsHelpFormatter)
Beispiel #39
0
    from distutils.command.build_py import build_py as _build_py
from distutils.spawn import find_executable

# Find the Protocol Compiler.
# Resolution order: explicit $PROTOC override, in-tree build locations
# (Unix binary, then Windows debug/release layouts), and finally whatever
# "protoc" is on the PATH.  Note: protoc may end up None if none are found.
if 'PROTOC' in os.environ and os.path.exists(os.environ['PROTOC']):
    protoc = os.environ['PROTOC']
elif os.path.exists("../src/protoc"):
    protoc = "../src/protoc"
elif os.path.exists("../src/protoc.exe"):
    protoc = "../src/protoc.exe"
elif os.path.exists("../vsprojects/Debug/protoc.exe"):
    protoc = "../vsprojects/Debug/protoc.exe"
elif os.path.exists("../vsprojects/Release/protoc.exe"):
    protoc = "../vsprojects/Release/protoc.exe"
else:
    protoc = find_executable("protoc")


def GetVersion():
    """Gets the version from google/protobuf/__init__.py

  Do not import google.protobuf.__init__ directly, because an installed
  protobuf library may be loaded instead."""

    with open(os.path.join('google', 'protobuf',
                           '__init__.py')) as version_file:
        # exec'ing the file defines __version__ in this module's globals.
        # The file is part of this source checkout (trusted input), so the
        # usual caution about exec on external data does not apply here.
        exec(version_file.read(), globals())
        return __version__


def generate_proto(source, require=True):
Beispiel #40
0
def create_config_file(watch,
                       start_cmd,
                       pidfile,
                       port=None,
                       env_vars=None,
                       max_memory=None,
                       syslog_server=None,
                       check_port=False):
    """ Writes a monit configuration file for a service.

  Args:
    watch: A string which identifies this process with monit.
    start_cmd: The start command to start the process.
    pidfile: The location of the pidfile that the process creates.
    port: An integer specifying the port for the process.
    env_vars: A dictionary specifying environment variables.
    max_memory: An integer that specifies the maximum amount of memory in
      megabytes that the process should use.
    syslog_server: The IP address of the remote syslog server to use.
    check_port: A boolean specifying that monit should check host and port.
  """
    if check_port:
        assert port is not None, 'When using check_port, port must be defined'

    # One monit entry per (watch, port) pair so multiple instances of the
    # same service can coexist.
    process_name = watch
    if port is not None:
        process_name += '-{}'.format(port)

    # Flattened into "KEY=VALUE KEY=VALUE " form for `env`.
    env_vars_str = ''
    if env_vars is not None:
        for key in env_vars:
            env_vars_str += '{}={} '.format(key, env_vars[key])

    bash = find_executable('bash')
    start_stop_daemon = find_executable('start-stop-daemon')

    logfile = os.path.join('/', 'var', 'log', 'appscale',
                           '{}.log'.format(process_name))

    if syslog_server is None:
        # Local logging: append stdout/stderr to the logfile.
        stop = 'start-stop-daemon --stop --pidfile {0} && rm {0}'.format(
            pidfile)
        bash_exec = 'exec env {vars} {start_cmd} >> {log} 2>&1'.format(
            vars=env_vars_str, start_cmd=start_cmd, log=logfile)
    else:
        # Remote syslog: tee into the logfile and forward via logger.
        # Kill entire process group.
        get_pgid = 'ps -o pgid= $(cat {}) | grep -o "[0-9]*"'.format(pidfile)
        stop = 'kill -- -$({}) && rm {}'.format(get_pgid, pidfile)
        bash_exec = 'exec env {vars} {start_cmd} 2>&1 | tee -a {log} | '\
                    'logger -t {watch} -u /tmp/ignored -n {syslog_server} -P 514'.\
          format(vars=env_vars_str, start_cmd=start_cmd, log=logfile, watch=watch,
                 syslog_server=syslog_server)

    start_line = ' '.join([
        start_stop_daemon, '--start', '--background', '--pidfile', pidfile,
        '--startas', "{} -- -c '{}'".format(bash, bash_exec)
    ])
    stop_line = "{} -c '{}'".format(bash, stop)

    # Fill the monit template with the assembled start/stop commands.
    with open(TEMPLATE_LOCATION) as template:
        output = template.read()
        output = output.format(process_name=process_name,
                               match_clause='PIDFILE "{}"'.format(pidfile),
                               group=watch,
                               start_line=start_line,
                               stop_line=stop_line)

    if max_memory is not None:
        output += '  if totalmem > {} MB for 10 cycles then restart\n'.format(
            max_memory)

    if check_port:
        private_ip = appscale_info.get_private_ip()
        output += '  if failed host {} port {} then restart\n'.format(
            private_ip, port)

    config_file = os.path.join(MONIT_CONFIG_DIR,
                               'appscale-{}.cfg'.format(process_name))
    file_io.write(config_file, output)

    return
Beispiel #41
0
class TC_20_DispVMMixin(object):
    """Integration tests for DisposableVM creation, GUI use and file opening.

    This mixin is combined (elsewhere) with a qubes test base class that
    supplies ``self.app``, ``self.loop``, ``self.template``,
    ``make_vm_name()`` and the X11 helpers such as ``wait_for_window()``
    and ``enter_keys_in_window()``.
    """

    def setUp(self):
        """Create an AppVM usable as a DispVM template and make it the
        application-wide default DispVM."""
        super(TC_20_DispVMMixin, self).setUp()
        if 'whonix-gw' in self.template:
            self.skipTest(
                'whonix-gw is not supported as DisposableVM Template')
        self.init_default_template(self.template)
        self.disp_base = self.app.add_new_vm(
            qubes.vm.appvm.AppVM,
            name=self.make_vm_name('dvm'),
            label='red',
            template_for_dispvms=True,
        )
        self.loop.run_until_complete(self.disp_base.create_on_disk())
        self.app.default_dispvm = self.disp_base
        self.app.save()

    def tearDown(self):
        # Unset the default DispVM first so the base AppVM can be removed
        # by the regular teardown machinery.
        self.app.default_dispvm = None
        super(TC_20_DispVMMixin, self).tearDown()

    def test_010_simple_dvm_run(self):
        """A DispVM starts and a trivial qubes.VMShell command echoes back."""
        dispvm = self.loop.run_until_complete(
            qubes.vm.dispvm.DispVM.from_appvm(self.disp_base))
        try:
            self.loop.run_until_complete(dispvm.start())
            (stdout, _) = self.loop.run_until_complete(
                dispvm.run_service_for_stdio('qubes.VMShell',
                                             input=b"echo test"))
            self.assertEqual(stdout, b"test\n")
        finally:
            self.loop.run_until_complete(dispvm.cleanup())

    @unittest.skipUnless(spawn.find_executable('xdotool'),
                         "xdotool not installed")
    def test_020_gui_app(self):
        """A GUI app (xterm) runs in a DispVM; the DispVM is removed after
        cleanup."""
        dispvm = self.loop.run_until_complete(
            qubes.vm.dispvm.DispVM.from_appvm(self.disp_base))
        try:
            self.loop.run_until_complete(dispvm.start())
            self.loop.run_until_complete(self.wait_for_session(dispvm))
            p = self.loop.run_until_complete(
                dispvm.run_service('qubes.VMShell',
                                   stdin=subprocess.PIPE,
                                   stdout=subprocess.PIPE))
            # wait for DispVM startup:
            p.stdin.write(b"echo test\n")
            self.loop.run_until_complete(p.stdin.drain())
            l = self.loop.run_until_complete(p.stdout.readline())
            self.assertEqual(l, b"test\n")

            self.assertTrue(dispvm.is_running())
            try:
                window_title = 'user@%s' % (dispvm.name, )
                # close xterm on Return, but after short delay, to allow
                # xdotool to send also keyup event
                p.stdin.write("xterm -e "
                              "\"sh -c 'echo \\\"\033]0;{}\007\\\";read x;"
                              "sleep 0.1;'\"\n".format(window_title).encode())
                self.loop.run_until_complete(p.stdin.drain())
                self.wait_for_window(window_title)

                time.sleep(0.5)
                self.enter_keys_in_window(window_title, ['Return'])
                # Wait for window to close
                self.wait_for_window(window_title, show=False)
                p.stdin.close()
                self.loop.run_until_complete(asyncio.wait_for(p.wait(), 30))
            except:
                # On any failure make sure the shell process is reaped
                # before re-raising.
                with suppress(ProcessLookupError):
                    p.terminate()
                self.loop.run_until_complete(p.wait())
                raise
            finally:
                del p
        finally:
            self.loop.run_until_complete(dispvm.cleanup())
            dispvm_name = dispvm.name
            del dispvm

        # give it a time for shutdown + cleanup
        self.loop.run_until_complete(asyncio.sleep(5))

        self.assertNotIn(dispvm_name, self.app.domains,
                         "DispVM not removed from qubes.xml")

    def _handle_editor(self, winid, copy=False):
        """Drive the editor window *winid*: either copy its whole content to
        the clipboard (``copy=True``) or type 'Test test 2' and save.

        Raises:
            KeyError: the window title does not match any known editor.
            NotImplementedError: copy requested for vim.
        """
        (window_title, _) = subprocess.Popen(
            ['xdotool', 'getwindowname', winid], stdout=subprocess.PIPE).\
            communicate()
        # Escape parentheses so the title can later be used as a pattern;
        # raw strings avoid the invalid '\(' escape-sequence warning.
        window_title = window_title.decode().strip().\
            replace('(', r'\(').replace(')', r'\)')
        time.sleep(1)
        if "gedit" in window_title or 'KWrite' in window_title or \
                'Mousepad' in window_title or 'Geany' in window_title:
            subprocess.check_call(
                ['xdotool', 'windowactivate', '--sync', winid])
            if copy:
                subprocess.check_call([
                    'xdotool', 'key', '--window', winid, 'key', 'ctrl+a',
                    'ctrl+c', 'ctrl+shift+c'
                ])
            else:
                subprocess.check_call(['xdotool', 'type', 'Test test 2'])
                subprocess.check_call(
                    ['xdotool', 'key', '--window', winid, 'key', 'Return'])
                time.sleep(0.5)
                subprocess.check_call(['xdotool', 'key', 'ctrl+s'])
            time.sleep(0.5)
            subprocess.check_call(['xdotool', 'key', 'ctrl+q'])
        elif "LibreOffice" in window_title:
            # wait for actual editor (we've got splash screen)
            search = subprocess.Popen([
                'xdotool', 'search', '--sync', '--onlyvisible', '--all',
                '--name', '--class', 'disp*|Writer'
            ],
                                      stdout=subprocess.PIPE,
                                      stderr=open(os.path.devnull, 'w'))
            retcode = search.wait()
            if retcode == 0:
                winid = search.stdout.read().strip()
            time.sleep(0.5)
            subprocess.check_call(
                ['xdotool', 'windowactivate', '--sync', winid])
            if copy:
                subprocess.check_call([
                    'xdotool', 'key', '--window', winid, 'key', 'ctrl+a',
                    'ctrl+c', 'ctrl+shift+c'
                ])
            else:
                subprocess.check_call(['xdotool', 'type', 'Test test 2'])
                subprocess.check_call(
                    ['xdotool', 'key', '--window', winid, 'key', 'Return'])
                time.sleep(0.5)
                subprocess.check_call(
                    ['xdotool', 'key', '--delay', '100', 'ctrl+s', 'Return'])
            time.sleep(0.5)
            subprocess.check_call(['xdotool', 'key', 'ctrl+q'])
        elif "emacs" in window_title:
            subprocess.check_call(
                ['xdotool', 'windowactivate', '--sync', winid])
            if copy:
                subprocess.check_call(
                    ['xdotool', 'key', 'ctrl+x', 'h', 'alt+w', 'ctrl+shift+c'])
            else:
                subprocess.check_call(['xdotool', 'type', 'Test test 2'])
                subprocess.check_call(
                    ['xdotool', 'key', '--window', winid, 'key', 'Return'])
                time.sleep(0.5)
                subprocess.check_call(['xdotool', 'key', 'ctrl+x', 'ctrl+s'])
            time.sleep(0.5)
            subprocess.check_call(['xdotool', 'key', 'ctrl+x', 'ctrl+c'])
        elif "vim" in window_title or "user@" in window_title:
            subprocess.check_call(
                ['xdotool', 'windowactivate', '--sync', winid])
            if copy:
                raise NotImplementedError('copy not implemented for vim')
            else:
                subprocess.check_call(
                    ['xdotool', 'key', 'i', 'type', 'Test test 2'])
                subprocess.check_call(
                    ['xdotool', 'key', '--window', winid, 'key', 'Return'])
                subprocess.check_call(
                    ['xdotool', 'key', 'Escape', 'colon', 'w', 'q', 'Return'])
        else:
            raise KeyError(window_title)

    @unittest.skipUnless(spawn.find_executable('xdotool'),
                         "xdotool not installed")
    def test_030_edit_file(self):
        """qvm-open-in-dvm edits a file in a DispVM and the change is
        propagated back to the source VM."""
        self.testvm1 = self.app.add_new_vm(
            qubes.vm.appvm.AppVM,
            name=self.make_vm_name('vm1'),
            label='red',
            template=self.app.domains[self.template])
        self.loop.run_until_complete(self.testvm1.create_on_disk())
        self.app.save()

        self.loop.run_until_complete(self.testvm1.start())
        self.loop.run_until_complete(
            self.testvm1.run_for_stdio("echo test1 > /home/user/test.txt"))

        p = self.loop.run_until_complete(
            self.testvm1.run("qvm-open-in-dvm /home/user/test.txt",
                             stdout=subprocess.PIPE,
                             stderr=subprocess.STDOUT))

        # if first 5 windows isn't expected editor, there is no hope
        winid = None
        for _ in range(5):
            try:
                winid = self.wait_for_window('disp[0-9]*',
                                             search_class=True,
                                             include_tray=False,
                                             timeout=60)
            except Exception as e:
                # Distinguish "no window yet" from "tool already exited".
                try:
                    self.loop.run_until_complete(asyncio.wait_for(p.wait(), 1))
                except asyncio.TimeoutError:
                    raise e
                else:
                    stdout = self.loop.run_until_complete(p.stdout.read())
                    self.fail('qvm-open-in-dvm exited prematurely with {}: {}'.
                              format(p.returncode, stdout))
            # let the application initialize
            self.loop.run_until_complete(asyncio.sleep(1))
            try:
                self._handle_editor(winid)
                break
            except KeyError:
                winid = None
        if winid is None:
            self.fail('Timeout waiting for editor window')

        self.loop.run_until_complete(p.communicate())
        (test_txt_content, _) = self.loop.run_until_complete(
            self.testvm1.run_for_stdio("cat /home/user/test.txt"))
        # Drop BOM if added by editor
        if test_txt_content.startswith(b'\xef\xbb\xbf'):
            test_txt_content = test_txt_content[3:]
        self.assertEqual(test_txt_content, b"Test test 2\ntest1\n")

    def _get_open_script(self, application):
        """Generate a script to instruct *application* to open *filename*"""
        if application == 'org.gnome.Nautilus':
            # BUG FIX: each fragment needs a trailing '\n' -- the import
            # line previously lacked one, which glued it to the next line
            # and made the generated script a SyntaxError.
            return ("#!/usr/bin/python3\n"
                    "import sys, os\n"
                    "from dogtail import tree, config\n"
                    "config.config.actionDelay = 1.0\n"
                    "config.config.defaultDelay = 1.0\n"
                    "config.config.searchCutoffCount = 10\n"
                    "app = tree.root.application('org.gnome.Nautilus')\n"
                    "app.child(os.path.basename(sys.argv[1])).doubleClick()\n"
                    ).encode()
        if application in ('mozilla-thunderbird', 'thunderbird'):
            with open(
                    '/usr/share/qubes/tests-data/'
                    'dispvm-open-thunderbird-attachment', 'rb') as f:
                return f.read()
        assert False

    @unittest.skipUnless(spawn.find_executable('xdotool'),
                         "xdotool not installed")
    def test_100_open_in_dispvm(self):
        """A file opened from an application is edited via a DispVM and the
        result lands in the dom0 clipboard."""
        self.testvm1 = self.app.add_new_vm(
            qubes.vm.appvm.AppVM,
            name=self.make_vm_name('vm1'),
            label='red',
            template=self.app.domains[self.template])
        self.loop.run_until_complete(self.testvm1.create_on_disk())
        self.app.save()

        app_id = 'mozilla-thunderbird'
        if 'debian' in self.template or 'whonix' in self.template:
            app_id = 'thunderbird'

        self.testvm1.features['service.app-dispvm.' + app_id] = '1'
        self.loop.run_until_complete(self.testvm1.start())
        self.loop.run_until_complete(
            self.testvm1.run_for_stdio("echo test1 > /home/user/test.txt"))

        self.loop.run_until_complete(
            self.testvm1.run_for_stdio("cat > /home/user/open-file",
                                       input=self._get_open_script(app_id)))
        self.loop.run_until_complete(
            self.testvm1.run_for_stdio("chmod +x /home/user/open-file"))

        # dogtail needs accessibility support enabled in the VM.
        self.loop.run_until_complete(
            self.testvm1.run_for_stdio(
                'gsettings set org.gnome.desktop.interface '
                'toolkit-accessibility true'))

        app = self.loop.run_until_complete(
            self.testvm1.run_service("qubes.StartApp+" + app_id))

        try:
            click_to_open = self.loop.run_until_complete(
                self.testvm1.run_for_stdio('./open-file test.txt',
                                           stdout=subprocess.PIPE,
                                           stderr=subprocess.STDOUT))
        except subprocess.CalledProcessError as err:
            with contextlib.suppress(asyncio.TimeoutError):
                self.loop.run_until_complete(asyncio.wait_for(app.wait(), 30))
            if app.returncode == 127:
                self.skipTest('{} not installed'.format(app_id))
            # BUG FIX: the format string has three placeholders, but four
            # arguments were passed; err.cmd shifted every value one slot.
            self.fail("'./open-file test.txt' failed with {}: {}{}".format(
                err.returncode, err.stdout, err.stderr))

        # if first 5 windows isn't expected editor, there is no hope
        winid = None
        for _ in range(5):
            winid = self.wait_for_window('disp[0-9]*',
                                         search_class=True,
                                         include_tray=False,
                                         timeout=60)
            # let the application initialize
            self.loop.run_until_complete(asyncio.sleep(1))
            try:
                # copy, not modify - attachment is set as read-only
                self._handle_editor(winid, copy=True)
                break
            except KeyError:
                winid = None
        if winid is None:
            self.fail('Timeout waiting for editor window')

        self.loop.run_until_complete(
            self.wait_for_window_hide_coro("editor", winid))

        with open('/var/run/qubes/qubes-clipboard.bin', 'rb') as f:
            test_txt_content = f.read()
        self.assertEqual(test_txt_content.strip(), b"test1")
Beispiel #42
0
    def run(self):
        """Build the JavaScript UI with npm and copy the artefacts into the
        build tree, then run the regular ``build_py`` command.

        Exits the process with status 1 when the ui directory, the npm
        executable, or the generated asset manifest is missing, or when an
        npm step fails. Honors ``--dry-run`` by skipping the JS build.
        """
        if not self.dry_run:

            #-------------------------------------------------------------------
            # Check and set the environment up
            #-------------------------------------------------------------------
            target_dir = os.path.join(self.build_lib, 'PiPilot', 'ui')

            # Start from a clean target so stale artefacts never survive.
            if os.path.exists(target_dir):
                rmtree(target_dir)

            ui_path = os.path.join(os.getcwd(), 'ui')
            if not os.path.exists(ui_path):
                print('[!] The ui directory does not exist')
                sys.exit(1)

            npm = find_executable('npm')
            if npm is None:
                print('[!] You need to have node installed to build this app')
                sys.exit(1)

            #-------------------------------------------------------------------
            # Build the JavaScript code
            #-------------------------------------------------------------------
            ret = run_command([npm, 'install'], ui_path)
            if ret != 0:
                print('[!] Installation of JavaScript dependencies failed')
                sys.exit(1)

            ret = run_command([npm, 'run-script', 'build'], ui_path)
            if ret != 0:
                print('[!] Build of JavaScript artefacts failed')
                sys.exit(1)

            #-------------------------------------------------------------------
            # Create a list of artefacts
            #-------------------------------------------------------------------
            artefacts = [
                'asset-manifest.json', 'favicon.png', 'index.html',
                'manifest.json', 'service-worker.js'
            ]

            build_dir = 'ui/build'
            asset_manifest = os.path.join(build_dir, artefacts[0])
            if not os.path.exists(asset_manifest):
                print('[!] Asset manifest does not exist.')
                sys.exit(1)

            # BUG FIX: the manifest was previously read via a bare open()
            # whose handle was never closed; use a context manager.
            with open(asset_manifest, 'r') as manifest_file:
                assets = json.load(manifest_file)
            # Only the values (hashed file names) matter, not the keys.
            for asset in assets.values():
                artefacts.append(asset)

            #-------------------------------------------------------------------
            # Copy the artefacts to the dist root
            #-------------------------------------------------------------------
            print('Copying JavaScript artefacts to', target_dir)
            for artefact in artefacts:
                source_file = os.path.join(build_dir, artefact)
                target_file = os.path.join(target_dir, artefact)
                target_prefix = os.path.dirname(target_file)
                # exist_ok avoids the check-then-create race for shared
                # directory prefixes.
                os.makedirs(target_prefix, exist_ok=True)
                copyfile(source_file, target_file)

        build_py.run(self)
Beispiel #43
0
def check_gulp():
    """Return (available, display name, docs anchor) for the gulp tool."""
    is_installed = spawn.find_executable('gulp') is not None
    return is_installed, 'gulp', '#gulp'
__author__ = 'laharah'
__version__ = '0.3.0'

import re
import os
import tempfile
import inspect
import sys
import warnings
from types import MethodType
import functools

import killableprocess
from distutils import spawn

# Resolve the filebot executable: try PATH first, then a list of common
# install locations, and finally fall back to the bare command name while
# warning the user.
FILEBOT_EXE = spawn.find_executable('filebot')
if not FILEBOT_EXE:
    locations = [
        r'C:\Program Files\FileBot\filebot.exe', r'/usr/bin/filebot',
        r'/usr/local/bin/filebot', r'/snap/bin/filebot'
    ]
    FILEBOT_EXE = next(
        (candidate for candidate in locations if os.path.exists(candidate)),
        None)
    if FILEBOT_EXE is None:
        warnings.warn("Could not find filebot executable!", stacklevel=2)
        FILEBOT_EXE = 'filebot'

FILEBOT_MODES = [
    'rename', 'move', 'check', 'get-missing-subtitles', 'get-subtitles', 'list',
Beispiel #45
0
def check_virtualenv():
    """Report whether the ``virtualenv`` executable can be found on PATH."""
    found = spawn.find_executable('virtualenv')
    return bool(found), 'virtualenv', '#virtualenv'
Beispiel #46
0
    from shutil import which as find_executable

NAME = "kfp-pipeline-spec"
VERSION = "0.1.4"

# Directory containing the .proto sources (parent of this file's directory).
PROTO_DIR = os.path.realpath(os.path.join(os.path.dirname(__file__),
                                          os.pardir))

# Output package directory for the generated _pb2 modules.
PKG_DIR = os.path.realpath(
    os.path.join(os.path.dirname(__file__), "kfp", "pipeline_spec"))

# Find the Protocol Compiler. (Taken from protobuf/python/setup.py)
# Honor $PROTOC when it points at an existing file, otherwise search PATH.
_protoc_from_env = os.environ.get("PROTOC")
if _protoc_from_env is not None and os.path.exists(_protoc_from_env):
    PROTOC = _protoc_from_env
else:
    PROTOC = find_executable("protoc")


def GenerateProto(source):
    """Derive the generated module path for a .proto file.

    The generated filename is *source* with its ``.proto`` suffix replaced
    by ``_pb2.py`` (mirroring the protobuf setup.py convention).

    Args:
        source: The source proto file that needs to be compiled.
    """

    pb2_path = source.replace(".proto", "_pb2.py")
Beispiel #47
0
def check_nodejs():
    """Report availability of the Node.js interpreter (``node``)."""
    return (spawn.find_executable('node') is not None, 'Node.js', '#nodejs')
Beispiel #48
0
def check_bower():
    """Report whether the ``bower`` package manager is on PATH."""
    bower_path = spawn.find_executable('bower')
    return bower_path is not None, 'bower', '#bower'
Beispiel #49
0
    def effect(self):
        """Inkscape extension entry point (Python 2 code).

        Depending on ``self.options.action`` either shows the LaTeX source
        stored in a previously generated group ("viewold"), or compiles
        LaTeX input (from the dialog or a file) to PDF, converts it to SVG
        and merges the drawing into the current document.
        """
        # Derive the visible page size from the window size and zoom level
        # recorded in the sodipodi namedview element.
        self.zoom = float(
            self.document.xpath('//sodipodi:namedview/@inkscape:zoom',
                                namespaces=inkex.NSS)[0])
        self.width = 1/self.zoom * \
            float(self.document.xpath(
                '//sodipodi:namedview/@inkscape:window-width', namespaces=inkex.NSS)[0])
        self.height = 1/self.zoom * \
            float(self.document.xpath(
                '//sodipodi:namedview/@inkscape:window-height', namespaces=inkex.NSS)[0])
        self.width = self.unittouu(str(self.width) + 'px')
        self.height = self.unittouu(str(self.height) + 'px')

        self.options.scale = float(self.options.scale)
        # The GUI may pass the action wrapped in literal double quotes.
        action = self.options.action.strip("\"")
        if action == "viewold":
            # Recover the LaTeX source stored as an attribute on a selected
            # group that was produced by an earlier run of this extension.
            for i in self.options.ids:
                node = self.selected[i]
                if node.tag != '{%s}g' % SVG_NS:
                    continue
                if '{%s}text' % WriteTexNS in node.attrib:
                    if self.options.tosvg == "true":
                        # Re-insert the stored source as a <text> element
                        # near the current view center.
                        doc = inkex.etree.fromstring(
                            '<text x="%g" y="%g">%s</text>' %
                            (self.view_center[0] - self.width / 6,
                             self.view_center[1] - self.height / 6,
                             node.attrib.get('{%s}text' % WriteTexNS,
                                             '').decode('string-escape')))
                        p = node.getparent()
                        # p.remove(node)
                        p.append(doc)
                    else:
                        # Python 2 "print to stderr" statement syntax.
                        print >> sys.stderr, node.attrib.get(
                            '{%s}text' % WriteTexNS,
                            '').decode('string-escape')
                    return
            print >> sys.stderr, "No text find."
            return
        else:
            # LaTeX source comes from the dialog ("new") or an input file.
            if action == "new":
                self.text = self.options.formula
            else:
                f = open(self.options.inputfile)
                self.text = f.read()
                f.close()

            if self.text == "":
                print >> sys.stderr, "empty LaTeX input. Nothing is changed."
                return

            # All intermediate build products live in a throw-away temp dir.
            tmp_dir = tempfile.mkdtemp("", "writetex-")
            tex_file = os.path.join(tmp_dir, "writetex.tex")
            svg_file = os.path.join(tmp_dir, "writetex.svg")
            pdf_file = os.path.join(tmp_dir, "writetex.pdf")
            log_file = os.path.join(tmp_dir, "writetex.log")
            out_file = os.path.join(tmp_dir, "writetex.out")
            err_file = os.path.join(tmp_dir, "writetex.err")
            aux_file = os.path.join(tmp_dir, "writetex.aux")
            crop_file = os.path.join(tmp_dir, "writetex-crop.pdf")

            # Preamble comes either directly from the dialog ("preline") or
            # from a preamble file on disk (empty path means no preamble).
            if self.options.preline == "true":
                preamble = self.options.preamble
            else:
                if self.options.preamble == "":
                    preamble = ""
                else:
                    f = open(self.options.preamble)
                    preamble = f.read()
                    f.close()

            # Wrap the user input in a minimal standalone document.
            self.tex = r"""
            \documentclass[landscape,a3paper]{article}
            \usepackage{geometry}
            %s
            \pagestyle{empty}
            \begin{document}
            \noindent
            %s
            \end{document}
            """ % (preamble, self.text)

            tex = open(tex_file, 'w')
            tex.write(self.tex)
            tex.close()

            # Compile the LaTeX source; stdout is redirected into out_file.
            if self.options.latexcmd.lower() == "xelatex":
                subprocess.call(
                    'xelatex "-output-directory=%s" -interaction=nonstopmode -halt-on-error "%s" > "%s"'
                    % (tmp_dir, tex_file, out_file),
                    shell=True)
            elif self.options.latexcmd.lower() == "pdflatex":
                subprocess.call(
                    'pdflatex "-output-directory=%s" -interaction=nonstopmode -halt-on-error "%s" > "%s"'
                    % (tmp_dir, tex_file, out_file),
                    shell=True)
            else:
                # Setting `latexcmd` to following string produces the same result as xelatex condition:
                # 'xelatex "-output-directory={tmp_dir}" -interaction=nonstopmode -halt-on-error "{tex_file}" > "{out_file}"'
                subprocess.call(self.options.latexcmd.format(
                    tmp_dir=tmp_dir, tex_file=tex_file, out_file=out_file),
                                shell=True)

            # Optionally tighten the PDF bounding box with pdfcrop; any
            # failure here is non-fatal and the uncropped PDF is kept.
            try:
                if not isinstance(spawn.find_executable('pdfcrop'),
                                  type(None)):
                    # Here is a bug in pdfcrop, no idea how to fix.
                    crop_cmd = 'pdfcrop "%s"' % pdf_file
                    crop = subprocess.Popen(crop_cmd,
                                            stdout=subprocess.PIPE,
                                            stderr=subprocess.PIPE,
                                            shell=True)
                    out = crop.communicate()
                    # Anything on stderr is reported, then we continue with
                    # the uncropped PDF.
                    if len(out[1]) > 0:
                        inkex.errormsg("Error in pdfcrop:")
                        inkex.errormsg(" CMD executed: %s" % crop_cmd)
                        for msg in out:
                            inkex.errormsg(msg)
                        inkex.errormsg("Process will continue without crop")

                    if os.path.exists(crop_file):
                        os.remove(pdf_file)
                        os.rename(crop_file, pdf_file)
            except:
                pass

            if not os.path.exists(pdf_file):
                # LaTeX failed: surface the log to the user and stop.
                print >> sys.stderr, "Latex error: check your latex file and preamble."
                print >> sys.stderr, open(log_file).read()
                return
            else:
                # Convert the PDF to SVG with either pdf2svg or pstoedit,
                # then merge the drawing into the current document.
                if self.options.pdftosvg == '1':
                    subprocess.call('pdf2svg %s %s' % (pdf_file, svg_file),
                                    shell=True)
                    self.merge_pdf2svg_svg(svg_file)
                else:
                    subprocess.call(
                        'pstoedit -f plot-svg "%s" "%s"  -dt -ssp -psarg -r9600x9600 > "%s" 2> "%s"'
                        % (pdf_file, svg_file, out_file, err_file),
                        shell=True)
                    self.merge_pstoedit_svg(svg_file)

            # Clean up all temporary files created above.
            os.remove(tex_file)
            os.remove(log_file)
            os.remove(out_file)
            if os.path.exists(err_file):
                os.remove(err_file)
            if os.path.exists(aux_file):
                os.remove(aux_file)
            if os.path.exists(svg_file):
                os.remove(svg_file)
            if os.path.exists(pdf_file):
                os.remove(pdf_file)
            os.rmdir(tmp_dir)
Beispiel #50
0
def check_pip():
    """Report whether the ``pip`` installer is available on PATH."""
    pip_path = spawn.find_executable('pip')
    return pip_path is not None, 'pip', '#pip'
Beispiel #51
0
# Root directory of project
__C.ROOT_DIR = osp.abspath(osp.join(osp.dirname(__file__), '..', '..'))

# Data directory
__C.DATA_DIR = osp.abspath(osp.join(__C.ROOT_DIR, 'data'))

# Model directory
__C.MODELS_DIR = osp.abspath(osp.join(__C.ROOT_DIR, 'models', 'pascal_voc'))

# Name (or path to) the matlab executable
__C.MATLAB = 'matlab'

# Place outputs under an experiments directory
__C.EXP_DIR = 'default'

# Use the GPU implementation of non-maximum suppression only when the CUDA
# toolchain (nvcc) is available on PATH.
_nvcc_available = spawn.find_executable("nvcc") is not None
__C.USE_GPU_NMS = _nvcc_available
if _nvcc_available:
    # Default GPU device id
    __C.GPU_ID = 0


def get_output_dir(imdb, weights_filename):
    """Return the directory where experimental artifacts are placed.
    If the directory does not exist, it is created.

    A canonical path is built using the name from an imdb and a network
    (if not None).
Beispiel #52
0
def check_git():
    """Report whether the ``git`` executable is available on the PATH.

    Returns
    -------
    tuple
        ``(found, display_name, doc_anchor)`` where ``found`` is True when
        ``git`` can be located.
    """
    git_path = spawn.find_executable('git')
    return git_path is not None, 'Git', '#git'
Beispiel #53
0
    def __init__(self):
        """Locate cmake and build the list of available generators.

        Runs ``cmake --help`` once and parses the trailing generator list
        into ``self.generatorNames``, keeping the raw help lines in
        ``self.generatorHelpList``.  Also builds ``self.generatorAliasMap``
        mapping short alias names (``vs2019``, ``make``, ...) to full
        generator names, and a human-readable ``self.generatorAliasHelp``.

        Raises
        ------
        Exception
            If no cmake executable can be found on the PATH.
        """
        self.cmakeExecutable = find_executable('cmake')

        if not self.cmakeExecutable or not os.path.exists(
                self.cmakeExecutable):
            raise Exception(
                "Couldn't find cmake executable. Please download and install it from www.cmake.org/download"
            )

        self.generatorHelpList = []
        self.generatorNames = []
        self.generatorAliasMap = {}

        # Holds a CMakeProblemError if `cmake --help` fails; checked below
        # before any parsing is attempted.
        self._cmakeError = None

        errOutput = ""
        try:
            # stderr is captured via a temp file because check_output only
            # captures stdout; the file is read back and removed either way.
            errOutFile, errOutFilePath = tempfile.mkstemp()
            try:
                cmakeHelp = subprocess.check_output(
                    "\"" + self.cmakeExecutable + "\" --help",
                    shell=True,
                    universal_newlines=True,
                    stderr=errOutFile)
            finally:
                os.close(errOutFile)
                try:
                    with open(errOutFilePath, "r") as f:
                        errOutput = f.read().strip()
                except:
                    pass
                os.remove(errOutFilePath)

        except subprocess.CalledProcessError as e:
            self._cmakeError = error.CMakeProblemError(e, errOutput)

        if self._cmakeError is None:
            cmakeHelp = cmakeHelp.strip()

            # the generator list is at the end of the output. And the entries are all indented.

            # Walk backwards collecting indented lines; the first
            # non-indented (or empty) line marks the start of the list.
            for line in reversed(cmakeHelp.splitlines()):
                if not line or not line.startswith(" "):
                    break

                self.generatorHelpList.append(line)

            self.generatorHelpList.reverse()

            # Generator entries look like "  Name = description"; deeper
            # indentation means a continuation line, which is skipped.
            for line in self.generatorHelpList:
                if line.startswith("  ") and len(line) > 2 and line[2] != " ":
                    line = line.strip()

                    name, sep, rest = line.partition(" = ")
                    if sep:
                        name = name.strip()
                        if name:
                            self.generatorNames.append(name)

            # Add "vsYYYY" aliases for Visual Studio generators, e.g.
            # "vs2019" -> "Visual Studio 16 2019".
            vsPrefix = "Visual Studio "
            for name in self.generatorNames:
                if name.startswith(vsPrefix):
                    words = name[len(vsPrefix):].strip().split()
                    if len(words) >= 2:
                        try:
                            # int() calls only validate the format; the
                            # bound values themselves are not used.
                            internalVersion = int(words[0])
                            yearVersion = int(words[1])

                            self.generatorAliasMap[
                                "vs" +
                                words[1]] = vsPrefix + words[0] + " " + words[1]
                        except Exception as e:
                            # ignore exceptions. The generator string does not have the expected format.
                            pass

            # Fixed aliases for the common make-style generators.
            self.generatorAliasMap["make"] = "Unix Makefiles"
            self.generatorAliasMap["nmake"] = "NMake Makefiles"
            self.generatorAliasMap["msysmake"] = "MSYS Makefiles"
            self.generatorAliasMap["mingwmake"] = "MinGW Makefiles"

            if "CodeBlocks - Unix Makefiles" in self.generatorNames:
                self.generatorAliasMap[
                    "codeblocks"] = "CodeBlocks - Unix Makefiles"

            if "CodeLite - Unix Makefiles" in self.generatorNames:
                self.generatorAliasMap[
                    "codelite"] = "CodeLite - Unix Makefiles"

        self.generatorAliasHelp = "Aliases for build system names:\n"
        for aliasName in sorted(self.generatorAliasMap.keys()):
            self.generatorAliasHelp += "\n%s = %s" % (
                aliasName, self.generatorAliasMap[aliasName])
    def validate(self, parser, options, context):
        """Validate and normalize generic mochitest options in place.

        Resolves the application binary, flavor, chunking, and the various
        tool/output paths, converting relative paths to absolute ones.
        Calls ``parser.error`` (which exits) on any invalid combination.

        Parameters are the argument parser, the parsed ``options``
        namespace (mutated and returned), and a ``context`` object that is
        not used in this method body — presumably part of a shared
        validator interface; TODO confirm against callers.
        """

        # and android doesn't use 'app' the same way, so skip validation
        if parser.app != 'android':
            if options.app is None:
                if build_obj:
                    from mozbuild.base import BinaryNotFoundException
                    try:
                        options.app = build_obj.get_binary_path()
                    except BinaryNotFoundException as e:
                        print('{}\n\n{}\n'.format(e, e.help()))
                        sys.exit(1)
                else:
                    parser.error(
                        "could not find the application path, --appname must be specified"
                    )
            elif options.app == "dist" and build_obj:
                options.app = build_obj.get_binary_path(where='staged-package')

            options.app = self.get_full_path(options.app, parser.oldcwd)
            if not os.path.exists(options.app):
                parser.error(
                    "Error: Path {} doesn't exist. Are you executing "
                    "$objdir/_tests/testing/mochitest/runtests.py?".format(
                        options.app))

        if options.flavor is None:
            options.flavor = 'plain'

        # Map a flavor alias (e.g. "browser") to its canonical suite name.
        for value in ALL_FLAVORS.values():
            if options.flavor in value['aliases']:
                options.flavor = value['suite']
                break

        if options.gmp_path is None and options.app and build_obj:
            # Need to fix the location of gmp_fake which might not be shipped in the binary
            gmp_modules = (('gmp-fake', '1.0'), ('gmp-clearkey', '0.1'),
                           ('gmp-fakeopenh264', '1.0'))
            options.gmp_path = os.pathsep.join(
                os.path.join(build_obj.bindir, *p) for p in gmp_modules)

        # Chunking sanity checks: both halves of the pair must be given,
        # and only one chunking strategy may be selected.
        if options.totalChunks is not None and options.thisChunk is None:
            parser.error(
                "thisChunk must be specified when totalChunks is specified")

        if options.extra_mozinfo_json:
            if not os.path.isfile(options.extra_mozinfo_json):
                parser.error("Error: couldn't find mozinfo.json at '%s'." %
                             options.extra_mozinfo_json)

            options.extra_mozinfo_json = json.load(
                open(options.extra_mozinfo_json))

        if options.totalChunks:
            if not 1 <= options.thisChunk <= options.totalChunks:
                parser.error("thisChunk must be between 1 and totalChunks")

        if options.chunkByDir and options.chunkByRuntime:
            parser.error(
                "can only use one of --chunk-by-dir or --chunk-by-runtime")

        if options.xrePath is None:
            # default xrePath to the app path if not provided
            # but only if an app path was explicitly provided
            if options.app != parser.get_default('app'):
                options.xrePath = os.path.dirname(options.app)
                if mozinfo.isMac:
                    options.xrePath = os.path.join(
                        os.path.dirname(options.xrePath), "Resources")
            elif build_obj is not None:
                # otherwise default to dist/bin
                options.xrePath = build_obj.bindir
            else:
                parser.error(
                    "could not find xre directory, --xre-path must be specified"
                )

        # allow relative paths
        if options.xrePath:
            options.xrePath = self.get_full_path(options.xrePath,
                                                 parser.oldcwd)

        if options.profilePath:
            options.profilePath = self.get_full_path(options.profilePath,
                                                     parser.oldcwd)

        if options.utilityPath:
            options.utilityPath = self.get_full_path(options.utilityPath,
                                                     parser.oldcwd)

        if options.certPath:
            options.certPath = self.get_full_path(options.certPath,
                                                  parser.oldcwd)
        elif build_obj:
            options.certPath = os.path.join(build_obj.topsrcdir, 'build',
                                            'pgo', 'certs')

        # A scheme shorter than 2 chars means symbolsPath is a local path
        # (a Windows drive letter parses as a 1-char "scheme"), so expand
        # it; real URLs are left untouched.
        if options.symbolsPath and len(urlparse(
                options.symbolsPath).scheme) < 2:
            options.symbolsPath = self.get_full_path(options.symbolsPath,
                                                     parser.oldcwd)
        elif not options.symbolsPath and build_obj:
            options.symbolsPath = os.path.join(build_obj.distdir,
                                               'crashreporter-symbols')

        if options.debugOnFailure and not options.jsdebugger:
            parser.error("--debug-on-failure requires --jsdebugger.")

        if options.jsdebuggerPath and not options.jsdebugger:
            parser.error("--jsdebugger-path requires --jsdebugger.")

        if options.debuggerArgs and not options.debugger:
            parser.error("--debugger-args requires --debugger.")

        if options.valgrind or options.debugger:
            # valgrind and some debuggers may cause Gecko to start slowly. Make sure
            # marionette waits long enough to connect.
            options.marionette_startup_timeout = 900
            options.marionette_socket_timeout = 540

        if options.store_chrome_manifest:
            options.store_chrome_manifest = os.path.abspath(
                options.store_chrome_manifest)
            if not os.path.isdir(os.path.dirname(
                    options.store_chrome_manifest)):
                parser.error(
                    "directory for %s does not exist as a destination to copy a "
                    "chrome manifest." % options.store_chrome_manifest)

        if options.jscov_dir_prefix:
            options.jscov_dir_prefix = os.path.abspath(
                options.jscov_dir_prefix)
            if not os.path.isdir(options.jscov_dir_prefix):
                parser.error(
                    "directory %s does not exist as a destination for coverage "
                    "data." % options.jscov_dir_prefix)

        if options.testingModulesDir is None:
            # Try to guess the testing modules directory.
            possible = [os.path.join(here, os.path.pardir, 'modules')]
            if build_obj:
                possible.insert(
                    0, os.path.join(build_obj.topobjdir, '_tests', 'modules'))

            for p in possible:
                if os.path.isdir(p):
                    options.testingModulesDir = p
                    break

        # Paths to specialpowers and mochijar from the tests archive.
        options.stagedAddons = [
            os.path.join(here, 'extensions', 'specialpowers'),
            os.path.join(here, 'mochijar'),
        ]
        if build_obj:
            objdir_xpi_stage = os.path.join(build_obj.distdir, 'xpi-stage')
            if os.path.isdir(objdir_xpi_stage):
                options.stagedAddons = [
                    os.path.join(objdir_xpi_stage, 'specialpowers'),
                    os.path.join(objdir_xpi_stage, 'mochijar'),
                ]
            plugins_dir = os.path.join(build_obj.distdir, 'plugins')
            if os.path.isdir(
                    plugins_dir
            ) and plugins_dir not in options.extraProfileFiles:
                options.extraProfileFiles.append(plugins_dir)

        # Even if buildbot is updated, we still want this, as the path we pass in
        # to the app must be absolute and have proper slashes.
        if options.testingModulesDir is not None:
            options.testingModulesDir = os.path.normpath(
                options.testingModulesDir)

            if not os.path.isabs(options.testingModulesDir):
                options.testingModulesDir = os.path.abspath(
                    options.testingModulesDir)

            if not os.path.isdir(options.testingModulesDir):
                parser.error('--testing-modules-dir not a directory: %s' %
                             options.testingModulesDir)

            # Normalize to forward slashes with a trailing separator, the
            # form the harness expects.
            options.testingModulesDir = options.testingModulesDir.replace(
                '\\', '/')
            if options.testingModulesDir[-1] != '/':
                options.testingModulesDir += '/'

        if options.runUntilFailure:
            if not options.repeat:
                options.repeat = 29

        if options.dumpOutputDirectory is None:
            options.dumpOutputDirectory = tempfile.gettempdir()

        if options.dumpAboutMemoryAfterTest or options.dumpDMDAfterTest:
            if not os.path.isdir(options.dumpOutputDirectory):
                parser.error('--dump-output-directory not a directory: %s' %
                             options.dumpOutputDirectory)

        if options.useTestMediaDevices:
            if not mozinfo.isLinux:
                parser.error(
                    '--use-test-media-devices is only supported on Linux currently'
                )

            # Any gst-launch version plus pactl is sufficient for fake
            # media devices.
            gst01 = spawn.find_executable("gst-launch-0.1")
            gst010 = spawn.find_executable("gst-launch-0.10")
            gst10 = spawn.find_executable("gst-launch-1.0")
            pactl = spawn.find_executable("pactl")

            if not (gst01 or gst10 or gst010):
                parser.error('Missing gst-launch-{0.1,0.10,1.0}, required for '
                             '--use-test-media-devices')

            if not pactl:
                parser.error('Missing binary pactl required for '
                             '--use-test-media-devices')

        # The a11y and chrome flavors can't run with e10s.
        if options.flavor in ('a11y', 'chrome') and options.e10s:
            parser.error("mochitest-{} does not support e10s, try again with "
                         "--disable-e10s.".format(options.flavor))

        if options.enable_fission:
            options.extraPrefs.append("fission.autostart=true")
            options.extraPrefs.append(
                "dom.serviceWorkers.parent_intercept=true")
            options.extraPrefs.append("browser.tabs.documentchannel=true")

        # Per-process leak thresholds in bytes.
        options.leakThresholds = {
            "default": options.defaultLeakThreshold,
            "tab": options.defaultLeakThreshold,
            "forkserver": options.defaultLeakThreshold,
            # GMP rarely gets a log, but when it does, it leaks a little.
            "gmplugin": 20000,
            "rdd": 400,
        }

        # See the dependencies of bug 1401764.
        if mozinfo.isWin:
            options.leakThresholds["tab"] = 1000

        # XXX We can't normalize test_paths in the non build_obj case here,
        # because testRoot depends on the flavor, which is determined by the
        # mach command and therefore not finalized yet. Conversely, test paths
        # need to be normalized here for the mach case.
        if options.test_paths and build_obj:
            # Normalize test paths so they are relative to test root
            options.test_paths = [
                build_obj._wrap_path_argument(p).relpath()
                for p in options.test_paths
            ]

        return options
Beispiel #55
0
    def remap_file(self, inFileName, outFileName, variableList=None,
                   overwrite=False, renormalize=None, logger=None):  # {{{
        '''
        Remap the data set in ``inFileName`` from the source grid to the
        destination grid using ``ncremap`` and the previously created
        mapping file, writing the result to ``outFileName``.

        Parameters
        ----------
        inFileName : str
            The path to the file containing a data set on the source grid

        outFileName : str
            The path where the data on the destination grid should be written

        variableList : list of str, optional
            A list of variables to be mapped.  By default, all variables are
            mapped

        overwrite : bool, optional
            Whether the destination file should be overwritten if it already
            exists. If `False`, and the destination file is already present,
            the function does nothing and returns immediately

        renormalize : float, optional
            A threshold to use to renormalize the data

        logger : ``logging.Logger``, optional
            A logger to which ncremap output should be redirected

        Raises
        ------
        OSError
            If ``ncremap`` is not in the system path.

        TypeError
            If the source or destination grid is a projection grid, which
            is not supported by ``ncremap``.

        ValueError
            If ``mappingFileName`` is ``None`` (meaning no remapping is
            needed).
        '''
        # Authors
        # -------
        # Xylar Asay-Davis

        if self.mappingFileName is None:
            # NOTE(review): "constents" in the message below is a typo for
            # "contents"; left untouched in this documentation-only pass.
            raise ValueError('No mapping file was given because remapping is '
                             'not necessary. The calling\n'
                             'code should simply use the constents of {} '
                             'directly.'.format(inFileName))

        if not overwrite and os.path.exists(outFileName):
            # a remapped file already exists, so nothing to do
            return

        if isinstance(self.sourceDescriptor, ProjectionGridDescriptor):
            raise TypeError('Source grid is a projection grid, not supported '
                            'by ncremap.\n'
                            'Consider using Remapper.remap')
        if isinstance(self.destinationDescriptor, ProjectionGridDescriptor):
            raise TypeError('Destination grid is a projection grid, not '
                            'supported by ncremap.\n'
                            'Consider using Remapper.remap')

        if find_executable('ncremap') is None:
            raise OSError('ncremap not found. Make sure the latest nco '
                          'package is installed: \n'
                          'conda install nco\n'
                          'Note: this presumes use of the conda-forge '
                          'channel.')

        args = ['ncremap',
                '-i', inFileName,
                '-m', self.mappingFileName,
                '--vrb=1',
                '-o', outFileName]

        # Extra arguments forwarded to the regridder via -R below.
        regridArgs = []

        if renormalize is not None:
            regridArgs.append('--renormalize={}'.format(renormalize))

        if isinstance(self.sourceDescriptor, LatLonGridDescriptor):
            regridArgs.extend(['--rgr lat_nm={}'.format(
                                   self.sourceDescriptor.latVarName),
                               '--rgr lon_nm={}'.format(
                                   self.sourceDescriptor.lonVarName)])

        if len(regridArgs) > 0:
            args.extend(['-R', ' '.join(regridArgs)])

        if isinstance(self.sourceDescriptor, MpasMeshDescriptor):
            # Note: using the -C (climatology) flag for now because otherwise
            #       ncremap tries to add a _FillValue attribute that might
            #       already be present and quits with an error
            args.extend(['-P', 'mpas', '-C'])

        if variableList is not None:
            args.extend(['-v', ','.join(variableList)])

        # set an environment variable to make sure we're not using czender's
        # local version of NCO instead of one we have intentionally loaded
        env = os.environ.copy()
        env['NCO_PATH_OVERRIDE'] = 'No'

        if logger is None:
            print('running: {}'.format(' '.join(args)))
            # make sure any output is flushed before we add output from the
            # subprocess
            sys.stdout.flush()
            sys.stderr.flush()

            subprocess.check_call(args, env=env)
        else:
            logger.info('running: {}'.format(' '.join(args)))
            for handler in logger.handlers:
                handler.flush()

            # Capture both streams so they can be forwarded to the logger.
            process = subprocess.Popen(args, stdout=subprocess.PIPE,
                                       stderr=subprocess.PIPE, env=env)
            stdout, stderr = process.communicate()

            if stdout:
                stdout = stdout.decode('utf-8')
                for line in stdout.split('\n'):
                    logger.info(line)
            if stderr:
                stderr = stderr.decode('utf-8')
                for line in stderr.split('\n'):
                    logger.error(line)

            if process.returncode != 0:
                raise subprocess.CalledProcessError(process.returncode,
                                                    ' '.join(args))
Beispiel #56
0
    def __init__(self, path, cache_dir):  # pylint: disable=too-many-branches
        """Initialize ClangFormat.

        Locates a clang-format binary of the expected version, trying in
        order: the explicit ``path`` argument, the ``MONGO_CLANG_FORMAT``
        environment variable, the user's PATH (version-suffixed names
        first), the Windows Program Files LLVM install, and finally a
        download into ``cache_dir``.  Exits the process if no binary of
        the correct version can be found.
        """
        self.path = None
        clang_format_progname_ext = ""

        if sys.platform == "win32":
            clang_format_progname_ext += ".exe"

        # Check the clang-format the user specified
        if path is not None:
            if os.path.isfile(path):
                self.path = path
            else:
                print("WARNING: Could not find clang-format %s" % (path))

        # Check the environment variable
        if "MONGO_CLANG_FORMAT" in os.environ:
            self.path = os.environ["MONGO_CLANG_FORMAT"]

            if self.path and not self._validate_version():
                self.path = None

        # Check the users' PATH environment variable now
        if self.path is None:
            # Check for various versions staring with binaries with version specific suffixes in the
            # user's path
            programs = [
                CLANG_FORMAT_PROGNAME + "-" + CLANG_FORMAT_VERSION,
                CLANG_FORMAT_PROGNAME + "-" + CLANG_FORMAT_SHORT_VERSION,
                CLANG_FORMAT_PROGNAME,
            ]

            if sys.platform == "win32":
                for i, _ in enumerate(programs):
                    programs[i] += '.exe'

            for program in programs:
                self.path = spawn.find_executable(program)

                if self.path:
                    if not self._validate_version():
                        self.path = None
                    else:
                        break

        # If Windows, try to grab it from Program Files
        # Check both native Program Files and WOW64 version
        # NOTE(review): this branch runs even when a valid binary was
        # already found above, so a Program Files install silently wins on
        # Windows — confirm whether that override is intended.  Also,
        # "ProgramFiles(x86)" is absent on 32-bit Windows, which would
        # raise KeyError here — TODO confirm.
        if sys.platform == "win32":
            programfiles = [
                os.environ["ProgramFiles"],
                os.environ["ProgramFiles(x86)"],
            ]

            for programfile in programfiles:
                win32bin = os.path.join(programfile, "LLVM\\bin\\clang-format.exe")
                if os.path.exists(win32bin):
                    self.path = win32bin
                    break

        # Have not found it yet, download it from the web
        if self.path is None:
            if not os.path.isdir(cache_dir):
                os.makedirs(cache_dir)

            self.path = os.path.join(
                cache_dir,
                CLANG_FORMAT_PROGNAME + "-" + CLANG_FORMAT_VERSION + clang_format_progname_ext)

            # Download a new version if the cache is empty or stale
            if not os.path.isfile(self.path) or not self._validate_version():
                if sys.platform.startswith("linux"):
                    get_clang_format_from_linux_cache(self.path)
                elif sys.platform == "darwin":
                    get_clang_format_from_darwin_cache(self.path)
                else:
                    print("ERROR: clang-format.py does not support downloading clang-format " +
                          " on this platform, please install clang-format " + CLANG_FORMAT_VERSION)

        # Validate we have the correct version
        # We only can fail here if the user specified a clang-format binary and it is the wrong
        # version
        if not self._validate_version():
            print("ERROR: exiting because of previous warning.")
            sys.exit(1)

        # Serializes console output when formatting files from worker threads.
        self.print_lock = threading.Lock()
Beispiel #57
0
import sys
import tempfile
import warnings
from distutils.spawn import find_executable
from subprocess import PIPE, Popen

import numpy as np

from mbuild import clone
from mbuild.box import Box
from mbuild.compound import Compound
from mbuild.exceptions import MBuildError

# Public API of this module.
__all__ = ["fill_box", "fill_region", "fill_sphere", "solvate"]

# Absolute path to the packmol executable, or None if it is not on the PATH.
PACKMOL = find_executable("packmol")
# Input-file header template: {0} tolerance, {1} output file name,
# {2} random seed, {3} sidemax.
PACKMOL_HEADER = """
tolerance {0:.16f}
filetype xyz
output {1}
seed {2}
sidemax {3}

"""
# Structure stanza that fixes a single solute at position {1},{2},{3};
# {0} is the structure file name.
PACKMOL_SOLUTE = """
structure {0}
    number 1
    center
    fixed {1:.3f} {2:.3f} {3:.3f} 0. 0. 0.
end structure
"""
Beispiel #58
0
    def build_mapping_file(self, method='bilinear',
                           additionalArgs=None, logger=None):  # {{{
        '''
        Given a source file defining either an MPAS mesh or a lat-lon grid and
        a destination file or set of arrays defining a lat-lon grid, constructs
        a mapping file used for interpolation between the source and
        destination grids.

        Parameters
        ----------
        method : {'bilinear', 'neareststod', 'conserve'}, optional
            The method of interpolation used, see documentation for
            `ESMF_RegridWeightGen` for details.

        additionalArgs : list of str, optional
            A list of additional arguments to ``ESMF_RegridWeightGen``

        logger : ``logging.Logger``, optional
            A logger to which ESMF_RegridWeightGen output should be redirected

        Raises
        ------
        OSError
            If ``ESMF_RegridWeightGen`` is not in the system path.

        ValueError
            If sourceDescriptor or destinationDescriptor is of an unknown type
        '''
        # Authors
        # -------
        # Xylar Asay-Davis

        if self.mappingFileName is None or \
                os.path.exists(self.mappingFileName):
            # a valid weight file already exists, so nothing to do
            return

        if find_executable('ESMF_RegridWeightGen') is None:
            raise OSError('ESMF_RegridWeightGen not found. Make sure esmf '
                          'package is installed via\n'
                          'latest nco: \n'
                          'conda install nco\n'
                          'Note: this presumes use of the conda-forge '
                          'channel.')

        # Write source and destination SCRIP files in temporary locations
        self.sourceDescriptor.to_scrip(_get_temp_path())
        self.destinationDescriptor.to_scrip(_get_temp_path())

        args = ['ESMF_RegridWeightGen',
                '--source', self.sourceDescriptor.scripFileName,
                '--destination', self.destinationDescriptor.scripFileName,
                '--weight', self.mappingFileName,
                '--method', method,
                '--netcdf4',
                '--no_log']

        if self.sourceDescriptor.regional:
            args.append('--src_regional')

        if self.destinationDescriptor.regional:
            args.append('--dst_regional')

        if self.sourceDescriptor.regional or \
                self.destinationDescriptor.regional:
            args.append('--ignore_unmapped')

        if additionalArgs is not None:
            args.extend(additionalArgs)

        if logger is None:
            print('running: {}'.format(' '.join(args)))
            # make sure any output is flushed before we add output from the
            # subprocess
            sys.stdout.flush()
            sys.stderr.flush()

            # throw out the standard output from ESMF_RegridWeightGen, as it's
            # rather verbose but keep stderr.  Use a context manager so the
            # devnull handle is closed even if check_call raises.
            with open(os.devnull, 'wb') as devnull:
                subprocess.check_call(args, stdout=devnull)

        else:
            logger.info('running: {}'.format(' '.join(args)))
            for handler in logger.handlers:
                handler.flush()

            process = subprocess.Popen(args, stdout=subprocess.PIPE,
                                       stderr=subprocess.PIPE)
            stdout, stderr = process.communicate()

            # throw out the standard output from ESMF_RegridWeightGen, as it's
            # rather verbose but keep stderr
            if stderr:
                # communicate() returns bytes; decode before splitting on a
                # str separator (matches remap_file's handling and avoids a
                # TypeError on Python 3)
                stderr = stderr.decode('utf-8')
                for line in stderr.split('\n'):
                    logger.error(line)

            if process.returncode != 0:
                raise subprocess.CalledProcessError(process.returncode,
                                                    ' '.join(args))

        # remove the temporary SCRIP files
        os.remove(self.sourceDescriptor.scripFileName)
        os.remove(self.destinationDescriptor.scripFileName)
Beispiel #59
0
        role_events = [
            event for event in r.events if event.get('event_data', {}).get(
                'role', '') == "benthomasson.hello_role"
        ]
        assert 'runner_on_ok' in [event['event'] for event in role_events]
        for event in role_events:
            event_data = event['event_data']
            assert not event_data.get(
                'warning', False)  # role use should not contain warnings
            if event['event'] == 'runner_on_ok':
                assert event_data['res']['msg'] == 'Hello world!'
    finally:
        shutil.rmtree('test/integration/artifacts')


@pytest.mark.skipif(find_executable('cgexec') is None,
                    reason="cgexec not available")
@pytest.mark.skipif(LooseVersion(
    pkg_resources.get_distribution('ansible').version) < LooseVersion('2.8'),
                    reason="Valid only on Ansible 2.8+")
def test_profile_data():
    tdir = tempfile.mkdtemp()
    try:
        r = run(private_data_dir=tdir,
                inventory="localhost ansible_connection=local",
                resource_profiling=True,
                resource_profiling_base_cgroup='ansible-runner',
                playbook=[{
                    'hosts': 'all',
                    'gather_facts': False,
                    'tasks': [{
Beispiel #60
0
import os
import tempfile
import warnings

import pytest

from matplotlib.font_manager import (findfont, FontProperties, fontManager,
                                     json_dump, json_load, get_font,
                                     get_fontconfig_fonts,
                                     is_opentype_cff_font, fontManager as fm)
from matplotlib import rc_context

# Detect the fontconfig `fc-list` tool; the lookup helper differs by
# Python version.  Assumes ``six`` is imported earlier in the file (not
# visible here) — TODO confirm.
if six.PY2:
    from distutils.spawn import find_executable
    has_fclist = find_executable('fc-list') is not None
else:
    # py >= 3.3: shutil.which supersedes distutils.spawn.find_executable
    from shutil import which
    has_fclist = which('fc-list') is not None


def test_font_priority():
    """The first family listed in font.sans-serif should win font lookup."""
    with rc_context(rc={'font.sans-serif': ['cmmi10', 'Bitstream Vera Sans']}):
        font = findfont(FontProperties(family=["sans-serif"]))
    assert os.path.basename(font) == 'cmmi10.ttf'

    # Smoketest get_charmap, which isn't used internally anymore
    font = get_font(font)
    cmap = font.get_charmap()
    assert len(cmap) == 131