Example #1
    def _startProcess(self):
        executable = sys.executable
        env = os.environ

        twistdBinaries = procutils.which("twistd2.4") + procutils.which("twistd")
        if not twistdBinaries:
            return defer.fail(RuntimeError("Couldn't find twistd to start subprocess"))
        twistd = twistdBinaries[0]

        setsid = procutils.which("setsid")

        self.connector = JuiceConnector(self.juice, self)

        args = [
            sys.executable,
            twistd,
            '--logfile=%s' % (self.logPath,)]

        if not runtime.platform.isWindows():
            args.append('--pidfile=%s' % (self.pidPath,))

        args.extend(['-noy',
                     self.tacPath])

        if setsid:
            args = ['setsid'] + args
            executable = setsid[0]

        self.process = process.spawnProcess(
            self.connector, executable, tuple(args), env=env)
Example #2
    def test_path_setup(self):
        """Validate that the path allows finding the executable."""
        from twisted.python.procutils import which
        open_port_exe = which("open-port")
        self.assertTrue(open_port_exe)
        self.assertTrue(open_port_exe[0].endswith("open-port"))

        close_port_exe = which("close-port")
        self.assertTrue(close_port_exe)
        self.assertTrue(close_port_exe[0].endswith("close-port"))
Example #3
def check_for_ghostscript():
    """
    Check for ghostscript and return the path to it; returns None
    if it is not installed.
    """
    from twisted.python.procutils import which
    paths = which("gs")
    if not paths:
        print("Ghostscript is not installed.")
        return None
    return paths[0]
Example #4
 def get_disconnection_args(self, dialer):
     assert dialer.binary == 'wvdial'
     
     killall_path = which('killall')[0]
     if not self.privileges_needed:
         return [killall_path, 'pppd', 'wvdial']
     else:
         gksudo_name = self.abstraction['gksudo_name']
         gksudo_path = which(gksudo_name)[0]
         return [gksudo_path, killall_path, 'pppd', 'wvdial']
Example #5
    def get_disconnection_args(self, dialer):
        assert dialer.binary == "wvdial"

        killall_path = which("killall")[0]
        if not self.privileges_needed:
            return [killall_path, "wvdial"]

        gksudo_name = self.abstraction["gksudo_name"]
        gksudo_path = which(gksudo_name)[0]
        args = " ".join([killall_path, "wvdial"])
        return [gksudo_path, "-c", args]
Example #6
def _synchronously_find_addresses_via_config():
    # originally by Greg Smith, hacked by Zooko and then Daira

    # We don't reach here for cygwin.
    if platform == 'win32':
        commands = _win32_commands
    else:
        commands = _unix_commands

    for (pathtotool, args, regex) in commands:
        # If pathtotool is a fully qualified path then we just try that.
        # If it is merely an executable name then we use Twisted's
        # "which()" utility and try each executable in turn until one
        # gives us something that resembles a dotted-quad IPv4 address.

        if os.path.isabs(pathtotool):
            exes_to_try = [pathtotool]
        else:
            exes_to_try = which(pathtotool)

        for exe in exes_to_try:
            try:
                addresses = _query(exe, args, regex)
            except Exception:
                addresses = []
            if addresses:
                return addresses

    return []
Example #7
        def locateCommand(name, cmd):
            for found in which(cmd):
                return found

            raise InternalDataStoreError(
                "Unable to locate {} command: {}".format(name, cmd)
            )
Example #8
 def test_whichWithoutPATH(self):
     """
     Test that if C{os.environ} does not have a C{'PATH'} key,
     L{procutils.which} returns an empty list.
     """
     del os.environ['PATH']
     self.assertEqual(procutils.which("executable"), [])
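The test above removes PATH from os.environ without putting it back, which can affect later tests. A minimal sketch of a self-restoring variant, assuming a standard trial/unittest TestCase (the method name and restore helper are illustrative, not from the original source):

    def test_whichWithoutPATHRestored(self):
        # Illustrative variant only: save PATH and restore it after the test
        # so that other tests are not affected.
        originalPath = os.environ.pop('PATH', None)
        def restore():
            if originalPath is not None:
                os.environ['PATH'] = originalPath
        self.addCleanup(restore)
        self.assertEqual(procutils.which("executable"), [])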
Example #9
def run_once(executable, *args):
    """
    Runs a command, without looking at its output or return value.
    Returns a Deferred or None.
    """
    from twisted.internet import reactor
    global _original_environment_variables
    def _cb(result):
        #print(result)
        pass
    try:
        executable = procutils.which(executable)[0]
    except IndexError:
        log.error("Could not find executable %s" % (executable))
        return None
    else:
        env = _original_environment_variables
        for k in ["GTK2_RC_FILES", "GTK_RC_FILES"]:
            if k in env:
                log.info("%s=%s" % (k, env[k]))
        log.info("$ %s %s" % (executable, " ".join(list(args))))
        log.debug("ENV=%s" % (env))
        d = utils.getProcessValue(executable, args, env, '.', reactor)
        d.addCallback(_cb)
        return d
Example #10
    def test_reExecService(self):
        """
        Verify that sending a HUP to the test reexec.tac causes startService
        and stopService to be called again by counting the number of times
        START and STOP appear in the process output.
        """
        # Inherit the reactor used to run trial
        reactorArg = "--reactor=select"
        for arg in sys.argv:
            if arg.startswith("--reactor"):
                reactorArg = arg
                break

        tacFilePath = os.path.join(os.path.dirname(__file__), "reexec.tac")
        twistd = which("twistd")[0]
        deferred = Deferred()
        proc = reactor.spawnProcess(
            CapturingProcessProtocol(deferred, None),
            sys.executable,
            [sys.executable, "-W", "ignore", twistd, reactorArg, "-n", "-y", tacFilePath],
            env=os.environ,
        )
        reactor.callLater(3, proc.signalProcess, "HUP")
        reactor.callLater(6, proc.signalProcess, "TERM")
        output = yield deferred
        self.assertEquals(output.count("START"), 2)
        self.assertEquals(output.count("STOP"), 2)
Example #11
def find_exe(exename):
    """
    Look for something named exename or exename + ".py".

    This is a kludge.

    @return: a list containing a single element, the path to the exename
        (if it is thought to be executable); or a two-element list whose
        first element is sys.executable and whose second element is the path
        to exename + ".py"; or False if neither can be found
    """
    warnings.warn("deprecated", DeprecationWarning)
    exes = which(exename)
    exe = exes and exes[0]
    if not exe:
        exe = os.path.join(sys.prefix, 'scripts', exename + '.py')
    if os.path.exists(exe):
        path, ext = os.path.splitext(exe)
        if ext.lower() in [".exe", ".bat",]:
            cmd = [exe,]
        else:
            cmd = [sys.executable, exe,]
        return cmd
    else:
        return False
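A brief usage sketch for find_exe() above; the "tahoe" executable name and the subprocess call are illustrative assumptions, not part of the original code:

import subprocess

# Illustrative only: "tahoe" is an assumed executable name; find_exe() returns
# either ["/path/to/tahoe"] or [sys.executable, "/path/to/tahoe.py"], so extra
# arguments are simply appended to the command list.
cmd = find_exe("tahoe")
if cmd:
    subprocess.call(cmd + ["--version"])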
Example #12
def list_cameras():
    """
    Calls the Deferred with the dict of devices as argument.

    @rtype: Deferred
    """
    def _cb(text, deferred):
        #print text
        ret = _parse_milhouse_list_cameras(text)
        deferred.callback(ret)

    def _eb(reason, deferred):
        deferred.errback(reason)
        print("Error listing cameras: %s" % (reason))

    command_name = "milhouse"
    args = ['--list-v4l2']
    try:
        executable = procutils.which(command_name)[0] # gets the executable
    except IndexError:
        return defer.fail(RuntimeError("Could not find command %s" % (command_name)))
    deferred = defer.Deferred()
    d = utils.getProcessOutput(executable, args=args, env=os.environ, errortoo=True) # errortoo puts stderr in output
    d.addCallback(_cb, deferred)
    d.addErrback(_eb, deferred)
    return deferred
Example #13
def jackd_get_infos():
    """
    Calls jack-info to retrieve info about jackd servers.

    Returns a Deferred whose result is a list of dicts:
    [{
    'period': 1024,
    'rate': 44100,
    'latency': 32
    }]
    @rtype: Deferred
    """
    def _cb(text, deferred):
        #print text
        ret = _parse_jack_info(text)
        deferred.callback(ret)

    def _eb(reason, deferred):
        deferred.errback(reason)
        print("Error listing jackd servers: %s" % (reason))

    command_name = "jack-info"
    args = []
    try:
        executable = procutils.which(command_name)[0] # gets the executable
    except IndexError:
        return defer.fail(RuntimeError("Could not find command %s" % (command_name)))
    deferred = defer.Deferred()
    d = utils.getProcessOutput(executable, args=args, env=os.environ, errortoo=True) # errortoo puts stderr in output
    d.addCallback(_cb, deferred)
    d.addErrback(_eb, deferred)
    return deferred
Example #14
def xvideo_extension_is_present():
    """
    Checks for XV extension.
    Result is boolean.

    @rtype: Deferred
    """
    def _cb(result, deferred):
        ret = True
        for line in result.splitlines():
            if line.find("no adaptors present") != -1: # Hardy
                ret = False
            if line.find("no adaptor present") != -1: # Karmic
                ret = False
        deferred.callback(ret)

    def _eb(reason, deferred):
        deferred.errback(reason)

    command_name = "xvinfo"
    try:
        executable = procutils.which(command_name)[0] # gets the executable
    except IndexError:
        return defer.fail(RuntimeError("Could not find command %s" % (command_name)))
    deferred = defer.Deferred()
    d = utils.getProcessOutput(executable, env=os.environ)
    d.addCallback(_cb, deferred)
    d.addErrback(_eb, deferred)
    return deferred
Example #15
    def assertExecuteWithKexAlgorithm(self, keyExchangeAlgo):
        """
        Call execute() method of L{OpenSSHClientMixin} with an ssh option that
        forces the exclusive use of the key exchange algorithm specified by
        keyExchangeAlgo

        @type keyExchangeAlgo: L{str}
        @param keyExchangeAlgo: The key exchange algorithm to use

        @return: L{defer.Deferred}
        """
        kexAlgorithms = []
        try:
            output = subprocess.check_output([which('ssh')[0], '-Q', 'kex'],
                                             stderr=subprocess.STDOUT)
            if not isinstance(output, str):
                output = output.decode("utf-8")
            kexAlgorithms = output.split()
        except:
            pass

        if keyExchangeAlgo not in kexAlgorithms:
            raise unittest.SkipTest(
                "{} not supported by ssh client".format(
                    keyExchangeAlgo))

        d = self.execute('echo hello', ConchTestOpenSSHProcess(),
                         '-oKexAlgorithms=' + keyExchangeAlgo)
        return d.addCallback(self.assertEqual, b'hello\n')
Example #16
def _synchronously_find_addresses_via_config():
    # originally by Greg Smith, hacked by Zooko to conform to Brian's API

    platform = _platform_map.get(sys.platform)
    if not platform:
        raise UnsupportedPlatformError(sys.platform)

    (pathtotool, args, regex,) = _tool_map[platform]

    # If pathtotool is a fully qualified path then we just try that.
    # If it is merely an executable name then we use Twisted's
    # "which()" utility and try each executable in turn until one
    # gives us something that resembles a dotted-quad IPv4 address.

    if os.path.isabs(pathtotool):
        return _query(pathtotool, args, regex)
    else:
        exes_to_try = which(pathtotool)
        for exe in exes_to_try:
            try:
                addresses = _query(exe, args, regex)
            except Exception:
                addresses = []
            if addresses:
                return addresses
        return []
Example #17
def _find_executable(name):
    executables = which(name)
    if executables:
        return executables[0]
    else:
        raise RuntimeError("Couldn't find the executable (%s) in PATH: %s"
                % (name, os.environ.get('PATH', None)))
Example #18
def jack_disconnect(source, sink):
    """
    Calls jack_disconnect with the given arguments.
    Returns a Deferred
    """
    deferred = defer.Deferred()
    def _cb(result):
        if not isinstance(result, int):
            log.error("The result of calling jack_disconnect should be an int.")
        log.info("jack_disconnect result: " + str(result))
        if result == 0:
            deferred.callback(True)
        else:
            deferred.callback(False)
    
    exec_name = "jack_disconnect"
    try:
        executable = procutils.which(exec_name)[0]
    except IndexError:
        log.error("Could not find executable %s" % (exec_name))
        return defer.succeed(False)
    else:
        args = [source, sink]
        log.info("$ %s %s" % (executable, " ".join(list(args))))
        d = utils.getProcessValue(executable, args, os.environ, '.', reactor)
        d.addCallback(_cb)
        return deferred
Example #19
        def hasPAKT(status):
            if status == 0:
                opts = '-oPubkeyAcceptedKeyTypes=ssh-dss '
            else:
                opts = ''

            process.deferred = defer.Deferred()
            # Pass -F /dev/null to prevent the user's configuration file from
            # being loaded, as it may contain settings that cause our tests to
            # fail or hang.
            cmdline = ('ssh -2 -l testuser -p %i '
                       '-F /dev/null '
                       '-oUserKnownHostsFile=kh_test '
                       '-oPasswordAuthentication=no '
                       # Always use the RSA key, since that's the one in kh_test.
                       '-oHostKeyAlgorithms=ssh-rsa '
                       '-a '
                       '-i dsa_test ') + opts + sshArgs + \
                       ' 127.0.0.1 ' + remoteCommand
            port = self.conchServer.getHost().port
            cmds = (cmdline % port).split()
            encodedCmds = []
            for cmd in cmds:
                if isinstance(cmd, unicode):
                    cmd = cmd.encode("utf-8")
                encodedCmds.append(cmd)
            reactor.spawnProcess(process, which('ssh')[0], encodedCmds)
            return process.deferred
Example #20
    def get_max_channels_in_raw(self):
        """
        Calls deferred with an int as argument
        @rtype: Deferred
        """
        def _cb(text, deferred):
            ret = None
            for i in text.splitlines():
                if "raw supports up to " in i:
                    ret = int(i.split()[-2])
            if ret is None:
                log.error("Could not figure out how many channels in raw are supported.")
                ret = 8
            deferred.callback(ret)

        def _eb(reason, deferred):
            deferred.errback(reason)
            print("Error getting max channels: %s" % (reason))

        command_name = "milhouse"
        args = ['--max-channels']
        try:
            executable = procutils.which(command_name)[0] # gets the executable
        except IndexError:
            return defer.fail(RuntimeError("Could not find command %s" % (command_name)))
        deferred = defer.Deferred()
        d = utils.getProcessOutput(executable, args=args, env=os.environ, errortoo=True) # errortoo puts stderr in output
        d.addCallback(_cb, deferred)
        d.addErrback(_eb, deferred)
        return deferred
Example #21
def list_network_interfaces_addresses():
    """
    Lists the IP addresses of the network interfaces.
    @rtype: Deferred
    """
    def _cb(result, deferred):
        #print 'cb', result
        ret = _parse_ifconfig(result)
        deferred.callback(ret)
        return None
    def _eb(reason, deferred):
        print("Error calling ifconfig.")
        print(reason)
        deferred.errback(reason)
        return None
    command_name = "ifconfig"
    args = []
    try:
        executable = procutils.which(command_name)[0] # gets the executable
    except IndexError:
        return defer.fail(RuntimeError("Could not find command %s" % (command_name)))
    deferred = defer.Deferred()
    d = utils.getProcessOutput(executable, args=args, env=os.environ)
    d.addCallback(_cb, deferred)
    d.addErrback(_eb, deferred)
    return deferred
Example #22
 def startService(self):
     MultiService.startService(self)
     clusterDir = self.dataStoreDirectory.child("cluster")
     workingDir = self.dataStoreDirectory.child("working")
     env = self.env = os.environ.copy()
     env.update(PGDATA=clusterDir.path,
                PGHOST=self.socketDir.path)
     initdb = which("initdb")[0]
     if not self.socketDir.isdir():
         self.socketDir.createDirectory()
     if self.uid and self.gid:
         os.chown(self.socketDir.path, self.uid, self.gid)
     if self.dataStoreDirectory.isdir():
         self.startDatabase()
     else:
         self.dataStoreDirectory.createDirectory()
         workingDir.createDirectory()
         if self.uid and self.gid:
             os.chown(self.dataStoreDirectory.path, self.uid, self.gid)
             os.chown(workingDir.path, self.uid, self.gid)
         dbInited = Deferred()
         reactor.spawnProcess(
             CapturingProcessProtocol(dbInited, None),
             initdb, [initdb], env, workingDir.path,
             uid=self.uid, gid=self.gid,
         )
         def doCreate(result):
             self.startDatabase()
         dbInited.addCallback(doCreate)
Example #23
 def startDatabase(self):
     """
     Start the database and initialize the subservice.
     """
     monitor = _PostgresMonitor(self)
     pg_ctl = which("pg_ctl")[0]
     # check consistency of initdb and postgres?
     reactor.spawnProcess(
         monitor, pg_ctl,
         [
             pg_ctl,
             "start",
             "-l", self.logFile,
             "-w",
             # XXX what are the quoting rules for '-o'?  do I need to repr()
             # the path here?
             "-o", "-c listen_addresses='' -k '%s' -c standard_conforming_strings=on -c shared_buffers=%d -c max_connections=%d"
                 % (self.socketDir.path, self.sharedBuffers, self.maxConnections),
         ],
         self.env,
         uid=self.uid, gid=self.gid,
     )
     self.monitor = monitor
     def gotReady(result):
         self.ready()
     def reportit(f):
         log.err(f)
     self.monitor.completionDeferred.addCallback(
         gotReady).addErrback(reportit)
Example #24
def list_midi_devices():
    """
    Lists MIDI devices by running the midistream executable.
    Result is a dict with keys "input" and "output". The values are dicts with an ID and name for each device.
    @rtype: Deferred
    """
    deferred = defer.Deferred()
    def _cb(text, deferred):
        #print text
        ret = _parse_miditream_list_devices(text)
        deferred.callback(ret)

    def _eb(reason, deferred):
        deferred.errback(reason)
        log.error("Error listing MIDI devices: %s" % (reason))

    command_name = "midistream"
    args = ['--list-devices']
    try:
        executable = procutils.which(command_name)[0] # gets the executable
    except IndexError:
        return defer.fail(RuntimeError("Could not find command %s" % (command_name)))
    log.debug("$ %s %s" % (executable, args))
    d = utils.getProcessOutput(executable, args=args, env=os.environ, errortoo=True) # errortoo puts stderr in output
    d.addCallback(_cb, deferred)
    d.addErrback(_eb, deferred)
    return deferred
Example #25
def run(argv=None):
    if argv is None:
        argv = sys.argv
    args = []
    exists = os.path.exists
    # munge unix-y path-like args into windows-y path-like args
    for a in argv[1:]:
        if exists(a):
            kept = a
        else:
            # delay the call to cygpath until here, where it's necessary
            _a = cygpath(a)
            # _a may be an existing or new file (new if containing dir exists)
            e = os.path.exists
            if exists(_a) or os.path.isdir(os.path.dirname(_a)):
                kept = _a
            else:
                kept = a
        if ' ' in kept or '\r' in kept or '\n' in kept:
            kept = '"%s"' % (kept,)
        args.append(kept)

    # read from stdin, which is sometimes useful
    si = STARTUPINFO() # I hate boilerplate crap
    si.hStdInput = win32api.GetStdHandle(win32api.STD_INPUT_HANDLE)
    si.dwFlags = win32process.STARTF_USESTDHANDLES


    # clobber $SHELL, which breaks ! commands when set to something Cygwin-y
    os.environ['SHELL'] = os.environ['COMSPEC']
    CreateProcess(None, 
                  r'%s %s' % (which("gvim_.exe")[0], ' '.join(args)),
                  None, None, 1,
                  win32con.CREATE_NO_WINDOW, None, None, si)
    return 1
Example #26
    def test_reactorSelection(self):
        """
        L{AxiomaticStart} optionally takes the name of a reactor and
        installs it instead of the default reactor.
        """
        # Since this process is already hopelessly distant from the state in
        # which I{axiomatic start} operates, it would make no sense to try a
        # functional test of this behavior in this process.  Since the
        # behavior being tested involves lots of subtle interactions between
        # lots of different pieces of code (the reactor might get installed
        # at the end of a ten-deep chain of imports going through as many
        # different projects), it also makes no sense to try to make this a
        # unit test.  So, start a child process and try to use the alternate
        # reactor functionality there.

        here = FilePath(__file__)
        # Try to find it relative to the source of this test.
        bin = here.parent().parent().parent().child("bin")
        axiomatic = bin.child("axiomatic")
        if axiomatic.exists():
            # Great, use that one.
            axiomatic = axiomatic.path
        else:
            # Try to find it on the path, instead.
            axiomatics = which("axiomatic")
            if axiomatics:
                # Great, it was on the path.
                axiomatic = axiomatics[0]
            else:
                # Nope, not there, give up.
                raise SkipTest(
                    "Could not find axiomatic script on path or at %s" % (
                        axiomatic.path,))

        # Create a store for the child process to use and put an item in it.
        # This will force an import of the module that defines that item's
        # class when the child process starts.  The module imports the default
        # reactor at the top-level, making this the worst-case for the reactor
        # selection code.
        storePath = self.mktemp()
        store = Store(storePath)
        SomeItem(store=store)
        store.close()

        # Install the select reactor because it is available on all platforms,
        # and it is still an error to try to install the select reactor even
        # if the already installed reactor was the select reactor.
        argv = [
            sys.executable,
            axiomatic, "-d", storePath,
            "start", "--reactor", "select", "-n"]
        expected = [
            "reactor class: twisted.internet.selectreactor.SelectReactor.",
            "reactor class: <class 'twisted.internet.selectreactor.SelectReactor'>"]
        proto, complete = AxiomaticStartProcessProtocol.protocolAndDeferred(expected)

        environ = os.environ.copy()
        reactor.spawnProcess(proto, sys.executable, argv, env=environ)
        return complete
Example #27
def run_command(main):
    config = Options()
    config.parseOptions()

    command = config["command"]
    if "/" in command:
        # don't search
        exe = command
    else:
        executables = which(command)
        if not executables:
            raise ValueError("unable to find '%s' in PATH (%s)" %
                             (command, os.environ.get("PATH")))
        exe = executables[0]

    pw = os.environ.get("PYTHONWARNINGS")
    DDW = "default::DeprecationWarning"
    if pw != DDW:
        print "note: $PYTHONWARNINGS is '%s', not the expected %s" % (pw, DDW)
        sys.stdout.flush()

    pp = RunPP()
    pp.d = defer.Deferred()
    pp.stdout = io.BytesIO()
    pp.stderr = io.BytesIO()
    reactor.spawnProcess(pp, exe, [exe] + config["args"], env=None)
    (signal, rc) = yield pp.d

    # maintain ordering, but ignore duplicates (for some reason, either the
    # 'warnings' module or twisted.python.deprecate isn't quashing them)
    already = set()
    warnings = []
    def add(line):
        if line in already:
            return
        already.add(line)
        warnings.append(line)

    pp.stdout.seek(0)
    for line in pp.stdout.readlines():
        if "DeprecationWarning" in line:
            add(line) # includes newline

    pp.stderr.seek(0)
    for line in pp.stderr.readlines():
        if "DeprecationWarning" in line:
            add(line)

    if warnings:
        if config["warnings"]:
            with open(config["warnings"], "wb") as f:
                print >>f, "".join(warnings)
        print "ERROR: %d deprecation warnings found" % len(warnings)
        sys.exit(1)

    print "no deprecation warnings"
    if signal:
        sys.exit(signal)
    sys.exit(rc)
Example #28
 def testLog(self):
     exes = which("buildbot")
     if not exes:
         raise unittest.SkipTest("Buildbot needs to be installed")
     self.buildbotexe = exes[0]
     d = getProcessValue(self.buildbotexe, ["create-master", "--log-size=1000", "--log-count=2", "master"])
     d.addCallback(self._master_created)
     return d
Example #29
 def testWhich(self):
     j = os.path.join
     paths = procutils.which("executable")
     expectedPaths = [j(self.foobaz, "executable"),
                      j(self.bazfoo, "executable")]
     if runtime.platform.isWindows():
         expectedPaths.append(j(self.bazbar, "executable"))
     self.assertEquals(paths, expectedPaths)
Example #30
 def spawn(cls, command, *args, **kwargs):
     protocol = cls(*args, **kwargs)
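     # which() returns a list of PATH matches; taking [0] assumes the command
     # exists and will raise IndexError otherwise.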
     protocol.reactor.spawnProcess(
         processProtocol=protocol,
         executable=which(command[0])[0],
         args=command,
     )
     return protocol.done
Example #31
            ["..", "..", "baz", "quux"],
        )

    def test_filePathDeltaSimilarEndElements(self):
        """
        L{filePathDelta} doesn't take into account final elements when
        comparing 2 paths, but stops at the first difference.
        """
        self.assertEqual(
            filePathDelta(FilePath("/foo/bar/bar/spam"),
                          FilePath("/foo/bar/baz/spam")),
            ["..", "..", "baz", "spam"],
        )


@skipIf(not which("sphinx-build"), "Sphinx not available.")
class SphinxBuilderTests(TestCase):
    """
    Tests for L{SphinxBuilder}.

    @note: This test case depends on twisted.web, which violates the standard
        Twisted practice of not having anything in twisted.python depend on
        other Twisted packages and opens up the possibility of creating
        circular dependencies.  Do not use this as an example of how to
        structure your dependencies.

    @ivar builder: A plain L{SphinxBuilder}.

    @ivar sphinxDir: A L{FilePath} representing a directory to be used for
        containing a Sphinx project.
Example #32
    else:
        win_7z_path = os.path.join(
            winreg.QueryValueEx(hkey, 'Path')[0], '7z.exe')
        winreg.CloseKey(hkey)
        win_7z_exes.insert(1, win_7z_path)

    switch_7z = 'x -y'
    # Future support:
    # 7-zip cannot extract tar.* with a single command.
    #    ".tar.gz", ".tgz",
    #    ".tar.bz2", ".tbz",
    #    ".tar.lzma", ".tlz",
    #    ".tar.xz", ".txz",
    exts_7z = ['.rar', '.zip', '.tar', '.7z', '.xz', '.lzma']
    for win_7z_exe in win_7z_exes:
        if which(win_7z_exe):
            EXTRACT_COMMANDS = dict.fromkeys(exts_7z, [win_7z_exe, switch_7z])
            break
else:
    required_cmds = [
        'unrar', 'unzip', 'tar', 'unxz', 'unlzma', '7zr', 'bunzip2'
    ]
    # Possible future support:
    # gunzip: gz (cmd will delete original archive)
    # the following do not extract to dest dir
    # ".xz": ["xz", "-d --keep"],
    # ".lzma": ["xz", "-d --format=lzma --keep"],
    # ".bz2": ["bzip2", "-d --keep"],

    EXTRACT_COMMANDS = {
        '.rar': ['unrar', 'x -o- -y'],
Example #33
def find_program(program):
    possibilities = which(program)
    if not possibilities:
        abort("Could not find '%s' in PATH", program)
    return possibilities[0]
Example #34
"""Functional tests for the ``flocker-volume`` command line tool."""

from subprocess import check_output, Popen, PIPE
import json
from unittest import skipUnless

from twisted.trial.unittest import TestCase
from twisted.python.filepath import FilePath
from twisted.python.procutils import which

from ... import __version__
from ...testtools import skip_on_broken_permissions, attempt_effective_uid
from ..testtools import create_zfs_pool

_require_installed = skipUnless(which("flocker-volume"),
                                "flocker-volume not installed")


def run(*args):
    """Run ``flocker-volume`` with the given arguments.

    :param args: Additional command line arguments as ``bytes``.

    :return: The output of standard out.
    :raises: If exit code is not 0.
    """
    return check_output([b"flocker-volume"] + list(args))


def run_expecting_error(*args):
Example #35
class FlockerClientTests(make_clientv1_tests()):
    """
    Interface tests for ``FlockerClient``.
    """
    @skipUnless(platform.isLinux(),
                "flocker-node-era currently requires Linux.")
    @skipUnless(which("flocker-node-era"),
                "flocker-node-era needs to be in $PATH.")
    def create_client(self):
        """
        Create a new ``FlockerClient`` instance pointing at a running control
        service REST API.

        :return: ``FlockerClient`` instance.
        """
        clock = Clock()
        _, self.port = find_free_port()
        self.persistence_service = ConfigurationPersistenceService(
            clock, FilePath(self.mktemp()))
        self.persistence_service.startService()
        self.cluster_state_service = ClusterStateService(reactor)
        self.cluster_state_service.startService()
        source = ChangeSource()
        # Prevent nodes being deleted by the state wiper.
        source.set_last_activity(reactor.seconds())
        self.era = UUID(check_output(["flocker-node-era"]))
        self.cluster_state_service.apply_changes_from_source(
            source=source,
            changes=[UpdateNodeStateEra(era=self.era, uuid=self.node_1.uuid)] +
            [
                NodeState(uuid=node.uuid, hostname=node.public_address)
                for node in [self.node_1, self.node_2]
            ],
        )
        self.addCleanup(self.cluster_state_service.stopService)
        self.addCleanup(self.persistence_service.stopService)
        credential_set, _ = get_credential_sets()
        credentials_path = FilePath(self.mktemp())
        credentials_path.makedirs()

        api_service = create_api_service(
            self.persistence_service,
            self.cluster_state_service,
            TCP4ServerEndpoint(reactor, self.port, interface=b"127.0.0.1"),
            rest_api_context_factory(
                credential_set.root.credential.certificate,
                credential_set.control),
            # Use consistent fake time for API results:
            clock)
        api_service.startService()
        self.addCleanup(api_service.stopService)

        credential_set.copy_to(credentials_path, user=True)
        return FlockerClient(reactor, b"127.0.0.1", self.port,
                             credentials_path.child(b"cluster.crt"),
                             credentials_path.child(b"user.crt"),
                             credentials_path.child(b"user.key"))

    def synchronize_state(self):
        deployment = self.persistence_service.get()
        # No IP address known, so use UUID for hostname
        node_states = [
            NodeState(uuid=node.uuid,
                      hostname=unicode(node.uuid),
                      applications=node.applications,
                      manifestations=node.manifestations,
                      paths={
                          manifestation.dataset_id:
                          FilePath(b"/flocker").child(
                              bytes(manifestation.dataset_id))
                          for manifestation in node.manifestations.values()
                      },
                      devices={}) for node in deployment.nodes
        ]
        self.cluster_state_service.apply_changes(node_states)

    def get_configuration_tag(self):
        return self.persistence_service.configuration_hash()

    @capture_logging(None)
    def test_logging(self, logger):
        """
        Successful HTTP requests are logged.
        """
        dataset_id = uuid4()
        d = self.client.create_dataset(primary=self.node_1.uuid,
                                       maximum_size=None,
                                       dataset_id=dataset_id)
        d.addCallback(lambda _: assertHasAction(
            self, logger, _LOG_HTTP_REQUEST, True,
            dict(url=b"https://127.0.0.1:{}/v1/configuration/datasets".format(
                self.port),
                 method=u"POST",
                 request_body=dict(primary=unicode(self.node_1.uuid),
                                   metadata={},
                                   dataset_id=unicode(dataset_id))),
            dict(response_body=dict(primary=unicode(self.node_1.uuid),
                                    metadata={},
                                    deleted=False,
                                    dataset_id=unicode(dataset_id)))))
        return d

    @capture_logging(None)
    def test_cross_process_logging(self, logger):
        """
        Eliot tasks can be traced from the HTTP client to the API server.
        """
        self.patch(rest_api, "_logger", logger)
        my_action = ActionType("my_action", [], [])
        with my_action():
            d = self.client.create_dataset(primary=self.node_1.uuid)

        def got_response(_):
            parent = LoggedAction.ofType(logger.messages, my_action)[0]
            child = LoggedAction.ofType(logger.messages, REQUEST)[0]
            self.assertIn(child, list(parent.descendants()))

        d.addCallback(got_response)
        return d

    @capture_logging(lambda self, logger: assertHasAction(
        self, logger, _LOG_HTTP_REQUEST, False,
        dict(url=b"https://127.0.0.1:{}/v1/configuration/datasets".format(
            self.port),
             method=u"POST",
             request_body=dict(primary=unicode(self.node_1.uuid),
                               maximum_size=u"notint",
                               metadata={})),
        {u'exception': u'flocker.apiclient._client.ResponseError'}))
    def test_unexpected_error(self, logger):
        """
        If the ``FlockerClient`` receives an unexpected HTTP response code it
        returns a ``ResponseError`` failure.
        """
        d = self.client.create_dataset(primary=self.node_1.uuid,
                                       maximum_size=u"notint")
        self.assertFailure(d, ResponseError)
        d.addCallback(lambda exc: self.assertEqual(exc.code, BAD_REQUEST))
        return d

    def test_unset_primary(self):
        """
        If the ``FlockerClient`` receives a dataset state where primary is
        ``None`` it parses it correctly.
        """
        dataset_id = uuid4()
        self.cluster_state_service.apply_changes([
            NonManifestDatasets(
                datasets={
                    unicode(dataset_id):
                    ModelDataset(dataset_id=unicode(dataset_id)),
                })
        ])
        d = self.client.list_datasets_state()
        d.addCallback(lambda states: self.assertEqual([
            DatasetState(dataset_id=dataset_id,
                         primary=None,
                         maximum_size=None,
                         path=None)
        ], states))
        return d

    def test_this_node_uuid_retry(self):
        """
        ``this_node_uuid`` retries if the node UUID is unknown.
        """
        # Pretend that the era for node 1 is something else; first try at
        # getting node UUID for real era will therefore fail:
        self.cluster_state_service.apply_changes(
            [UpdateNodeStateEra(era=uuid4(), uuid=self.node_1.uuid)])

        # When we lookup the DeploymentState the first time we'll set the
        # value to the correct one, so second try should succeed:
        def as_deployment(original=self.cluster_state_service.as_deployment):
            result = original()
            self.cluster_state_service.apply_changes(changes=[
                UpdateNodeStateEra(era=self.era, uuid=self.node_1.uuid)
            ])
            return result

        self.patch(self.cluster_state_service, "as_deployment", as_deployment)

        d = self.client.this_node_uuid()
        d.addCallback(self.assertEqual, self.node_1.uuid)
        return d

    def test_this_node_uuid_no_retry_on_other_responses(self):
        """
        ``this_node_uuid`` doesn't retry on unexpected responses.
        """
        # Cause 500 errors to be raised by the API endpoint:
        self.patch(self.cluster_state_service, "as_deployment", lambda: 1 / 0)
        return self.assertFailure(self.client.this_node_uuid(), ResponseError)
Example #36
 def test_path_setup(self):
     """Validate that the path allows finding the executable."""
     from twisted.python.procutils import which
     exe = which("relation-get")
     self.assertTrue(exe)
     self.assertTrue(exe[0].endswith("relation-get"))
Example #37
 def __init__(self, treetop, branch, repository):
     self.treetop = treetop
     self.repository = repository
     self.branch = branch
     self.exe = which(self.vcexe)[0]
Example #38
    'MONGO_APPLICATION', 'MONGO_IMAGE', 'require_flocker_cli',
    ]

# The port on which the acceptance testing nodes make docker available
REMOTE_DOCKER_PORT = 2375

# XXX The MONGO_APPLICATION will have to be removed because it does not match
# the tutorial yml files, and the yml should be testably the same:
# https://github.com/ClusterHQ/flocker/issues/947
MONGO_APPLICATION = u"mongodb-example-application"
MONGO_IMAGE = u"clusterhq/mongodb"

# XXX This assumes that the desired version of flocker-cli has been installed.
# Instead, the testing environment should do this automatically.
# See https://github.com/ClusterHQ/flocker/issues/901.
require_flocker_cli = skipUnless(which("flocker-deploy"),
                                 "flocker-deploy not installed")

require_mongo = skipUnless(
    PYMONGO_INSTALLED, "PyMongo not installed")


def _run_SSH(port, user, node, command, input, key=None):
    """
    Run a command via SSH.

    :param int port: Port to connect to.
    :param bytes user: User to run the command as.
    :param bytes node: Node to run command on.
    :param command: Command to run.
    :type command: ``list`` of ``bytes``.
Example #39
    def deliverJob(self):
        # returns a Deferred that fires when the job has been delivered
        if self.connect == "ssh":
            tryhost = self.getopt("host")
            tryport = self.getopt("port")
            tryuser = self.getopt("username")
            trydir = self.getopt("jobdir")
            buildbotbin = self.getopt("buildbotbin")
            ssh_command = self.getopt("ssh")
            if not ssh_command:
                ssh_commands = which("ssh")
                if not ssh_commands:
                    raise RuntimeError(
                        "couldn't find ssh executable, make sure "
                        "it is available in the PATH")

                argv = [ssh_commands[0]]
            else:
                # Split the string on whitespace to allow passing options in
                # ssh command too, but preserving whitespace inside quotes to
                # allow using paths with spaces in them which is common under
                # Windows. And because Windows uses backslashes in paths, we
                # can't just use shlex.split there as it would interpret them
                # specially, so do it by hand.
                if runtime.platformType == 'win32':
                    # Note that regex here matches the arguments, not the
                    # separators, as it's simpler to do it like this. And then we
                    # just need to get all of them together using the slice and
                    # also remove the quotes from those that were quoted.
                    argv = [
                        string.strip(a, '"') for a in re.split(
                            r'''([^" ]+|"[^"]+")''', ssh_command)[1::2]
                    ]
                else:
                    # Do use standard tokenization logic under POSIX.
                    argv = shlex.split(ssh_command)

            if tryuser:
                argv += ["-l", tryuser]

            if tryport:
                argv += ["-p", tryport]

            argv += [tryhost, buildbotbin, "tryserver", "--jobdir", trydir]
            pp = RemoteTryPP(self.jobfile)
            reactor.spawnProcess(pp, argv[0], argv, os.environ)
            d = pp.d
            return d
        if self.connect == "pb":
            user = self.getopt("username")
            passwd = self.getopt("passwd")
            master = self.getopt("master")
            tryhost, tryport = master.split(":")
            tryport = int(tryport)
            f = pb.PBClientFactory()
            d = f.login(credentials.UsernamePassword(user, passwd))
            reactor.connectTCP(tryhost, tryport, f)
            d.addCallback(self._deliverJob_pb)
            return d
        raise RuntimeError(
            "unknown connecttype '%s', should be 'ssh' or 'pb'" % self.connect)
Example #40
    def start_client(self):
        # this returns a Deferred that fires with the client's control.furl
        log.msg("MAKING CLIENT")
        # self.testdir is an absolute Unicode path
        clientdir = self.clientdir = os.path.join(self.testdir, u"client")
        clientdir_str = clientdir.encode(get_filesystem_encoding())
        quiet = StringIO()
        create_node.create_node({'basedir': clientdir}, out=quiet)
        log.msg("DONE MAKING CLIENT")
        # now replace tahoe.cfg
        # set webport=0 and then ask the node what port it picked.
        f = open(os.path.join(clientdir, "tahoe.cfg"), "w")
        f.write("[node]\n"
                "web.port = tcp:0:interface=127.0.0.1\n"
                "[client]\n"
                "introducer.furl = %s\n"
                "shares.happy = 1\n"
                "[storage]\n" % (self.introducer_furl, ))

        if self.mode in ("upload-self", "receive"):
            # accept and store shares, to trigger the memory consumption bugs
            pass
        else:
            # don't accept any shares
            f.write("readonly = true\n")
            ## also, if we do receive any shares, throw them away
            #f.write("debug_discard = true")
        if self.mode == "upload-self":
            pass
        f.close()
        self.keepalive_file = os.path.join(clientdir,
                                           client.Client.EXIT_TRIGGER_FILE)
        # now start updating the mtime.
        self.touch_keepalive()
        ts = internet.TimerService(1.0, self.touch_keepalive)
        ts.setServiceParent(self.sparent)

        pp = ClientWatcher()
        self.proc_done = pp.d = defer.Deferred()
        logfile = os.path.join(self.basedir, "client.log")
        tahoes = procutils.which("tahoe")
        if not tahoes:
            raise RuntimeError("unable to find a 'tahoe' executable")
        cmd = [tahoes[0], "run", ".", "-l", logfile]
        env = os.environ.copy()
        self.proc = reactor.spawnProcess(pp,
                                         cmd[0],
                                         cmd,
                                         env,
                                         path=clientdir_str)
        log.msg("CLIENT STARTED")

        # now we wait for the client to get started. we're looking for the
        # control.furl file to appear.
        furl_file = os.path.join(clientdir, "private", "control.furl")
        url_file = os.path.join(clientdir, "node.url")

        def _check():
            if pp.ended and pp.ended.value.status != 0:
                # the twistd process ends normally (with rc=0) if the child
                # is successfully launched. It ends abnormally (with rc!=0)
                # if the child cannot be launched.
                raise ChildDidNotStartError(
                    "process ended while waiting for startup")
            return os.path.exists(furl_file)

        d = self.poll(_check, 0.1)

        # once it exists, wait a moment before we read from it, just in case
        # it hasn't finished writing the whole thing. Ideally control.furl
        # would be created in some atomic fashion, or made non-readable until
        # it's ready, but I can't think of an easy way to do that, and I
        # think the chances that we'll observe a half-write are pretty low.
        def _stall(res):
            d2 = defer.Deferred()
            reactor.callLater(0.1, d2.callback, None)
            return d2

        d.addCallback(_stall)

        def _read(res):
            # read the node's URL
            self.webish_url = open(url_file, "r").read().strip()
            if self.webish_url[-1] == "/":
                # trim trailing slash, since the rest of the code wants it gone
                self.webish_url = self.webish_url[:-1]
            f = open(furl_file, "r")
            furl = f.read()
            return furl.strip()

        d.addCallback(_read)
        return d
Example #41
from subprocess import check_output, CalledProcessError
from unittest import skipUnless

from twisted.python.procutils import which
from twisted.python.filepath import FilePath
from twisted.trial.unittest import TestCase

from ...testtools import create_ssh_server, create_ssh_agent
from .._sshconfig import OpenSSHConfiguration
from ...node import Deployment, Node

from ..script import DeployScript

from ... import __version__

_require_installed = skipUnless(which("flocker-deploy"),
                                "flocker-deploy not installed")


class FlockerDeployTests(TestCase):
    """
    Tests for ``flocker-deploy``.
    """
    @_require_installed
    def setUp(self):
        pass

    def test_version(self):
        """``flocker-deploy --version`` returns the current version."""
        result = check_output([b"flocker-deploy"] + [b"--version"])
        self.assertEqual(result, b"%s\n" % (__version__, ))
Example #42
 def initdb(self):
     return which(self._initdb)[0]
Example #43
 def pgCtl(self):
     """
     Locate the path to pg_ctl.
     """
     return which(self._pgCtl)[0]
Example #44
        iptables = LoggedAction.ofType(logger.messages, IPTABLES)
        case.assertNotEqual(iptables, [])

    return validate


_environment_skip = skipUnless(
    is_environment_configured(),
    "Cannot test port forwarding without suitable test environment.")

_dependency_skip = skipUnless(
    NOMENCLATURE_INSTALLED,
    "Cannot test port forwarding without nomenclature installed.")

_iptables_skip = skipUnless(
    which(b"iptables-save"),
    "Cannot set up isolated environment without iptables-save.")


class GetIPTablesTests(TestCase):
    """
    Tests for the iptables rule preserving helper.
    """
    @_dependency_skip
    @_environment_skip
    def test_get_iptables_rules(self):
        """
        :py:code:`get_iptables_rules()` returns the same list of
        bytes as long as no rules have changed.
        """
        first = get_iptables_rules()
Example #45
 def _checkForRpmbuild(self):
     """
     tap2rpm requires rpmbuild; skip tests if rpmbuild is not present.
     """
     if not procutils.which("rpmbuild"):
         raise SkipTest("rpmbuild must be present to test tap2rpm")
Example #46
                     'HostKeyAlgorithms=ssh-rsa', '-o', 'Port=%i' % (port, ),
                     '-b', fn, '[email protected]')
            return args

        def check(result):
            self.assertEqual(result[2], 0)
            for i in [
                    'testDirectory', 'testRemoveFile', 'testRenameFile',
                    'testfile1'
            ]:
                self.assertIn(i, result[0])

        d.addCallback(hasPAKT)
        d.addCallback(lambda args: getProcessOutputAndValue('sftp', args))
        return d.addCallback(check)


if None in (unix, cryptography, pyasn1,
            interfaces.IReactorProcess(reactor, None)):
    if _reason is None:
        _reason = "don't run w/o spawnProcess or cryptography or pyasn1"
    OurServerCmdLineClientTests.skip = _reason
    OurServerBatchFileTests.skip = _reason
    OurServerSftpClientTests.skip = _reason
    StdioClientTests.skip = _reason
    SSHSessionTests.skip = _reason
else:
    from twisted.python.procutils import which
    if not which('sftp'):
        OurServerSftpClientTests.skip = "no sftp command-line client available"
Example #47
from __future__ import generators

import sys
import zipfile
import py_compile

# we're going to ignore failures to import tkinter and fall back
# to using the console if the required dll is not found

# Scary kludge to work around tk84.dll bug:
# https://sourceforge.net/tracker/index.php?func=detail&aid=814654&group_id=5470&atid=105470
# Without which(): you get a windows missing-dll popup message
from twisted.python.procutils import which
tkdll = 'tk84.dll'
if which(tkdll) or which('DLLs/%s' % tkdll):
    try:
        import Tkinter
        from Tkinter import *
        from twisted.internet import tksupport
    except ImportError:
        pass

# twisted
from twisted.internet import reactor, defer
from twisted.python import failure, log, zipstream, util, usage, log
# local
import os.path


class ProgressBar:
Example #48
        win_7z_path = os.path.join(
            winreg.QueryValueEx(hkey, 'Path')[0], '7z.exe')
        winreg.CloseKey(hkey)
        win_7z_exes.insert(1, win_7z_path)

    switch_7z = 'x -y'
    ## Future support:
    ## 7-zip cannot extract tar.* with a single command.
    #    ".tar.gz", ".tgz",
    #    ".tar.bz2", ".tbz",
    #    ".tar.lzma", ".tlz",
    #    ".tar.xz", ".txz",
    exts_7z = ['.rar', '.zip', '.tar', '.7z', '.xz', '.lzma']

    for win_7z_exe in win_7z_exes:
        if which(win_7z_exe):
            EXTRACT_COMMANDS = dict.fromkeys(exts_7z, [win_7z_exe, switch_7z])
            break

else:
    required_commands = [
        'unrar',
        'unzip',
        'tar',
        'unxz',
        'unlzma',
        '7zr',
        'bunzip2',
    ]
    # Possible future support:
    # gunzip: gz (cmd will delete original archive)
Example #49
    def execute(self, remoteCommand, process, sshArgs=""):
        """
        Connects to the SSH server started in L{ConchServerSetupMixin.setUp} by
        running the 'ssh' command line tool.

        @type remoteCommand: str
        @param remoteCommand: The command (with arguments) to run on the
        remote end.

        @type process: L{ConchTestOpenSSHProcess}

        @type sshArgs: str
        @param sshArgs: Arguments to pass to the 'ssh' process.

        @return: L{defer.Deferred}
        """
        # PubkeyAcceptedKeyTypes does not exist prior to OpenSSH 7.0 so we
        # first need to check if we can set it. If we can, -V will just print
        # the version without doing anything else; if we can't, we will get a
        # configuration error.
        d = getProcessValue(
            which("ssh")[0], ("-o", "PubkeyAcceptedKeyTypes=ssh-dss", "-V")
        )

        def hasPAKT(status):
            if status == 0:
                opts = "-oPubkeyAcceptedKeyTypes=ssh-dss "
            else:
                opts = ""

            process.deferred = defer.Deferred()
            # Pass -F /dev/null to prevent the user's configuration file from
            # being loaded, as it may contain settings that cause our tests to
            # fail or hang.
            cmdline = (
                (
                    "ssh -2 -l testuser -p %i "
                    "-F /dev/null "
                    "-oUserKnownHostsFile=kh_test "
                    "-oPasswordAuthentication=no "
                    # Always use the RSA key, since that's the one in kh_test.
                    "-oHostKeyAlgorithms=ssh-rsa "
                    "-a "
                    "-i dsa_test "
                )
                + opts
                + sshArgs
                + " 127.0.0.1 "
                + remoteCommand
            )
            port = self.conchServer.getHost().port
            cmds = (cmdline % port).split()
            encodedCmds = []
            for cmd in cmds:
                if isinstance(cmd, str):
                    cmd = cmd.encode("utf-8")
                encodedCmds.append(cmd)
            reactor.spawnProcess(process, which("ssh")[0], encodedCmds)
            return process.deferred

        return d.addCallback(hasPAKT)
Example #50
        """
        cmds = """-chown 0 missingFile
pwd
exit
"""

        def _cbCheckResult(res):
            self.assertIn(self.testDir.asBytesMode().path, res)

        d = self._getBatchOutput(cmds)
        d.addCallback(_cbCheckResult)
        return d


@skipIf(skipTests, "don't run w/o spawnProcess or cryptography or pyasn1")
@skipIf(not which("ssh"), "no ssh command-line client available")
@skipIf(not which("sftp"), "no sftp command-line client available")
class OurServerSftpClientTests(CFTPClientTestBase):
    """
    Test the sftp server against sftp command line client.
    """

    def setUp(self):
        CFTPClientTestBase.setUp(self)
        return self.startServer()

    def tearDown(self):
        return self.stopServer()

    def test_extendedAttributes(self):
        """
Example #51
class FlockerCATests(make_script_tests(EXECUTABLE)):
    """
    Tests for ``flocker-ca`` script.
    """
    @skipUnless(which(EXECUTABLE), EXECUTABLE + " not installed")
    @skipUnless(which(b"openssl"), "openssl not installed")
    def setUp(self):
        """
        Create a root certificate for the test.
        """
        super(FlockerCATests, self).setUp()
        self.temp_path = FilePath(self.mktemp())
        self.temp_path.makedirs()
        flocker_ca(b"initialize", b"mycluster", cwd=self.temp_path.path)

    def test_initialize(self):
        """
        Test for ``flocker-ca initialize`` command.
        Runs ``flocker-ca initialize`` and calls ``openssl`` to verify the
        generated certificate is a self-signed certificate authority.
        """
        self.assertTrue(
            openssl_verify(b"cluster.crt",
                           b"cluster.crt",
                           cwd=self.temp_path.path))

    def test_control_certificate(self):
        """
        Test for ``flocker-ca create-control-certificate`` command.
        Runs ``flocker-ca initialize`` followed by
        ``flocker-ca create-control-certificate` and calls ``openssl``
        to verify the generated control certificate and private key is
        signed by the previously generated certificate authority.
        """
        flocker_ca(b"create-control-certificate",
                   b"my.example.com",
                   cwd=self.temp_path.path)
        self.assertTrue(
            openssl_verify(b"cluster.crt",
                           b"control-my.example.com.crt",
                           cwd=self.temp_path.path))

    def test_node_certificate(self):
        """
        Test for ``flocker-ca create-node-certificate`` command.
        Runs ``flocker-ca initialize`` followed by
        ``flocker-ca create-node-certificate` and calls ``openssl``
        to verify the generated node certificate and private key is
        signed by the previously generated certificate authority.
        """
        status, output = flocker_ca(b"create-node-certificate",
                                    cwd=self.temp_path.path)
        # Find the generated file name with UUID from the output.
        file_pattern = re.compile(r"([a-zA-Z0-9\-]*\.crt)")
        file_name = file_pattern.search(output).groups()[0]
        self.assertTrue(
            openssl_verify(b"cluster.crt", file_name, cwd=self.temp_path.path))

    def test_apiuser_certificate(self):
        """
        Test for ``flocker-ca create-api-certificate`` command.
        Runs ``flocker-ca initialize`` followed by
        ``flocker-ca create-api-certificate` and calls ``openssl``
        to verify the generated control certificate and private key is
        signed by the previously generated certificate authority.
        """
        flocker_ca(b"create-api-certificate",
                   b"alice",
                   cwd=self.temp_path.path)
        self.assertTrue(
            openssl_verify(b"cluster.crt",
                           b"alice.crt",
                           cwd=self.temp_path.path))

    def test_help_description(self):
        """
        The output of ``flocker-ca --help`` includes the helptext with
        its original formatting.
        """
        helptext = CAOptions.helptext
        expected = ""
        for line in helptext.splitlines():
            expected = expected + line.strip() + "\n"
        status, output = flocker_ca(b"--help")
        self.assertIn(expected, output)
Example #52
0
#
# On the devstack guest do the following:
#   cd flocker
#   workon flocker
#
# Then update the branch to match the code you want to test.
#
# Then run these tests:
#
#   sudo /usr/bin/env \
#     FLOCKER_FUNCTIONAL_TEST_CLOUD_CONFIG_FILE=$PWD/acceptance.yml \
#     FLOCKER_FUNCTIONAL_TEST=TRUE \
#     FLOCKER_FUNCTIONAL_TEST_CLOUD_PROVIDER=devstack-openstack \
#     $(type -p trial) \
#     flocker.node.agents.functional.test_cinder.CinderAttachmentTests
require_virtio = skipIf(not which('virsh'),
                        "Tests require the ``virsh`` command.")


@require_backend('openstack')
def cinderblockdeviceapi_for_test(test_case):
    """
    Create a ``CinderBlockDeviceAPI`` instance for use in tests.

    :param TestCase test_case: The test being run.

    :returns: A ``CinderBlockDeviceAPI`` instance.  Any volumes it creates will
        be cleaned up at the end of the test (using ``test_case``\ 's cleanup
        features).
    """
    return get_blockdeviceapi_with_cleanup(test_case)
Example #53
0
    def test_reactorSelection(self):
        """
        L{AxiomaticStart} optionally takes the name of a reactor and
        installs it instead of the default reactor.
        """
        # Since this process is already hopelessly distant from the state in
        # which I{axiomatic start} operates, it would make no sense to try a
        # functional test of this behavior in this process.  Since the
        # behavior being tested involves lots of subtle interactions between
        # lots of different pieces of code (the reactor might get installed
        # at the end of a ten-deep chain of imports going through as many
        # different projects), it also makes no sense to try to make this a
        # unit test.  So, start a child process and try to use the alternate
        # reactor functionality there.

        here = FilePath(__file__)
        # Try to find it relative to the source of this test.
        bin = here.parent().parent().parent().child("bin")
        axiomatic = bin.child("axiomatic")
        if axiomatic.exists():
            # Great, use that one.
            axiomatic = axiomatic.path
        else:
            # Try to find it on the path, instead.
            axiomatics = which("axiomatic")
            if axiomatics:
                # Great, it was on the path.
                axiomatic = axiomatics[0]
            else:
                # Nope, not there, give up.
                raise SkipTest(
                    "Could not find axiomatic script on path or at %s" %
                    (axiomatic.path, ))

        # Create a store for the child process to use and put an item in it.
        # This will force an import of the module that defines that item's
        # class when the child process starts.  The module imports the default
        # reactor at the top-level, making this the worst-case for the reactor
        # selection code.
        storePath = self.mktemp()
        store = Store(storePath)
        SomeItem(store=store)
        store.close()

        # Install the select reactor because it is available on all platforms,
        # and because it is still an error to try to install the select
        # reactor even if the already installed reactor was the select reactor.
        argv = [
            sys.executable, axiomatic, "-d", storePath, "start", "--reactor",
            "select", "-n"
        ]
        expected = [
            "reactor class: twisted.internet.selectreactor.SelectReactor.",
            "reactor class: <class 'twisted.internet.selectreactor.SelectReactor'>"
        ]
        proto, complete = AxiomaticStartProcessProtocol.protocolAndDeferred(
            expected)

        environ = os.environ.copy()
        reactor.spawnProcess(proto, sys.executable, argv, env=environ)
        return complete
Example #54
0
# On the devstack guest do the following:
#   cd flocker
#   workon flocker
#
# Then update the branch to match the code you want to test.
#
# Then run these tests:
#
#   sudo /usr/bin/env \
#     FLOCKER_FUNCTIONAL_TEST_CLOUD_CONFIG_FILE=$PWD/acceptance.yml \
#     FLOCKER_FUNCTIONAL_TEST=TRUE \
#     FLOCKER_FUNCTIONAL_TEST_CLOUD_PROVIDER=devstack-openstack \
#     $(type -p trial) \
#     flocker.node.agents.functional.test_cinder.CinderAttachmentTests
require_virtio = skipIf(
    not which('virsh'), "Tests require the ``virsh`` command.")


def cinderblockdeviceapi_for_test(test_case):
    """
    Create a ``CinderBlockDeviceAPI`` instance for use in tests.

    :param TestCase test_case: The test being run.

    :returns: A ``CinderBlockDeviceAPI`` instance.  Any volumes it creates will
        be cleaned up at the end of the test (using ``test_case``\ 's cleanup
        features).
    """
    return get_blockdeviceapi_with_cleanup(test_case, ProviderType.openstack)

Example #55
0
def flog_binary():
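    # which() returns a list of matching paths; indexing [0] assumes a
    # flogtool executable is on the PATH and raises IndexError otherwise.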
    return which('flogtool')[0]
Example #56
0
# Copyright Hybrid Logic Ltd.  See LICENSE file for details.
"""
Functional tests for the ``flocker-changestate`` command line tool.
"""

from subprocess import check_output
from unittest import skipUnless

from twisted.python.procutils import which
from twisted.trial.unittest import TestCase

from ... import __version__

_require_installed = skipUnless(which("flocker-changestate"),
                                "flocker-changestate not installed")


class FlockerChangeStateTests(TestCase):
    """Tests for ``flocker-changestate``."""
    @_require_installed
    def test_version(self):
        """
        ``flocker-changestate`` is a command available on the system path.
        """
        result = check_output([b"flocker-changestate"] + [b"--version"])
        self.assertEqual(result, b"%s\n" % (__version__, ))


class FlockerReportStateTests(TestCase):
    """Tests for ``flocker-reportstate``."""
    @_require_installed
Example #57
0
try:
    import pydoctor.driver

    # it might not be installed, or it might use syntax not available in
    # this version of Python.
except (ImportError, SyntaxError):
    pydoctorSkip = "Pydoctor is not present."
else:
    if getattr(pydoctor, "version_info", (0,)) < (0, 1):
        pydoctorSkip = "Pydoctor is too old."
    else:
        pydoctorSkip = skip


if not skip and which("sphinx-build"):
    sphinxSkip = None
else:
    sphinxSkip = "Sphinx not available."


if not skip and which("git"):
    gitVersion = runCommand(["git", "--version"]).split(b" ")[2].split(b".")

    # We want git 2.0 or above.
    if int(gitVersion[0]) >= 2:
        gitSkip = skip
    else:
        gitSkip = "old git is present"
else:
    gitSkip = "git is not present."
Example #58
0
class LatexSpitter(BaseLatexSpitter):

    baseLevel = 0
    diaHack = bool(procutils.which("dia"))

    def writeNodeData(self, node):
        buf = StringIO()
        getLatexText(node, buf.write, latexEscape)
        self.writer(buf.getvalue().replace('<', '$<$').replace('>', '$>$'))

    def visitNode_head(self, node):
        authorNodes = domhelpers.findElementsWithAttribute(
            node, 'rel', 'author')
        authorNodes = [n for n in authorNodes if n.tagName == 'link']

        if authorNodes:
            self.writer('\\author{')
            authors = []
            for aNode in authorNodes:
                name = aNode.getAttribute('title')
                href = aNode.getAttribute('href')
                if href.startswith('mailto:'):
                    href = href[7:]
                if href:
                    if name:
                        name += ' '
                    name += '$<$' + href + '$>$'
                if name:
                    authors.append(name)

            self.writer(' \\and '.join(authors))
            self.writer('}')

        self.visitNodeDefault(node)

    def visitNode_pre(self, node):
        """
        Writes a I{verbatim} block when it encounters a I{pre} element.

        @param node: The element to process.
        @type node: L{xml.dom.minidom.Element}
        """
        self.writer('\\begin{verbatim}\n')
        buf = StringIO()
        getLatexText(node, buf.write)
        self.writer(tree._removeLeadingTrailingBlankLines(buf.getvalue()))
        self.writer('\\end{verbatim}\n')

    def visitNode_code(self, node):
        fout = StringIO()
        getLatexText(node, fout.write, latexEscape)
        data = lowerUpperRE.sub(r'\1\\linebreak[1]\2', fout.getvalue())
        data = data[:1] + data[1:].replace('.', '.\\linebreak[1]')
        self.writer('\\texttt{' + data + '}')

    def visitNode_img(self, node):
        fileName = os.path.join(self.currDir, node.getAttribute('src'))
        target, ext = os.path.splitext(fileName)
        if self.diaHack and os.access(target + '.dia', os.R_OK):
            ext = '.dia'
            fileName = target + ext
        f = getattr(self, 'convert_' + ext[1:], None)
        if not f:
            return
        target = os.path.join(self.currDir, os.path.basename(target) + '.eps')
        f(fileName, target)
        target = os.path.basename(target)
        self._write_img(target)

    def _write_img(self, target):
        """Write LaTeX for image."""
        self.writer('\\begin{center}\\includegraphics[%%\n'
                    'width=1.0\n'
                    '\\textwidth,height=1.0\\textheight,\nkeepaspectratio]'
                    '{%s}\\end{center}\n' % target)

    def convert_png(self, src, target):
        # XXX there's a *reason* Python comes with the pipes module -
        # someone fix this to use it.
        r = os.system('pngtopnm "%s" | pnmtops -noturn > "%s"' % (src, target))
        if r != 0:
            raise OSError(r)
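
    def convert_png_subprocess(self, src, target):
        # Hypothetical alternative, not part of the original class: the XXX
        # comment above asks for the os.system pipeline to be replaced (it
        # suggests the pipes module); a subprocess-based variant is sketched
        # here instead.  It assumes the pngtopnm and pnmtops tools are on the
        # PATH, and adds explicit error checking for both stages.
        import subprocess
        with open(target, 'wb') as out:
            pngtopnm = subprocess.Popen(['pngtopnm', src],
                                        stdout=subprocess.PIPE)
            pnmtops = subprocess.Popen(['pnmtops', '-noturn'],
                                       stdin=pngtopnm.stdout, stdout=out)
            # Close our copy of the pipe so pnmtops sees EOF / SIGPIPE.
            pngtopnm.stdout.close()
            if pnmtops.wait() != 0 or pngtopnm.wait() != 0:
                raise OSError("pngtopnm | pnmtops pipeline failed")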

    def convert_dia(self, src, target):
        # EVIL DISGUSTING HACK
        data = os.popen("gunzip -dc %s" % (src)).read()
        pre = '<dia:attribute name="scaling">\n          <dia:real val="1"/>'
        post = '<dia:attribute name="scaling">\n          <dia:real val="0.5"/>'
        f = open('%s_hacked.dia' % (src), 'wb')
        f.write(data.replace(pre, post))
        f.close()
        os.system('gzip %s_hacked.dia' % (src, ))
        os.system('mv %s_hacked.dia.gz %s_hacked.dia' % (src, src))
        # Let's pretend we never saw that.

        # Silly dia needs an X server, even though it doesn't display anything.
        # If this is a problem for you, try using Xvfb.
        os.system("dia %s_hacked.dia -n -e %s" % (src, target))
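        # A hedged alternative for headless machines, assuming the xvfb-run
        # wrapper is installed (it supplies the X server that dia insists on):
        # os.system("xvfb-run -a dia %s_hacked.dia -n -e %s" % (src, target))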

    def visitNodeHeader(self, node):
        level = (int(node.tagName[1]) - 2) + self.baseLevel
        self.writer('\n\n\\' + level * 'sub' + 'section{')
        spitter = HeadingLatexSpitter(self.writer, self.currDir, self.filename)
        spitter.visitNodeDefault(node)
        self.writer('}\n')

    def visitNode_a_listing(self, node):
        """
        Writes a I{verbatim} block when it encounters a code listing
        (represented by an I{a} element with a I{listing} class).

        @param node: The element to process.
        @type node: C{xml.dom.minidom.Element}
        """
        fileName = os.path.join(self.currDir, node.getAttribute('href'))
        self.writer('\\begin{verbatim}\n')
        lines = map(str.rstrip, open(fileName).readlines())
        skipLines = int(node.getAttribute('skipLines') or 0)
        lines = lines[skipLines:]
        self.writer(tree._removeLeadingTrailingBlankLines('\n'.join(lines)))
        self.writer('\\end{verbatim}')

        # Write a caption for this source listing
        fileName = os.path.basename(fileName)
        caption = domhelpers.getNodeText(node)
        if caption == fileName:
            caption = 'Source listing'
        self.writer('\\parbox[b]{\\linewidth}{\\begin{center}%s --- '
                    '\\begin{em}%s\\end{em}\\end{center}}' %
                    (latexEscape(caption), latexEscape(fileName)))

    def visitNode_a_href(self, node):
        supported_schemes = ['http', 'https', 'ftp', 'mailto']
        href = node.getAttribute('href')
        if urlparse.urlparse(href)[0] in supported_schemes:
            text = domhelpers.getNodeText(node)
            self.visitNodeDefault(node)
            if text != href:
                self.writer('\\footnote{%s}' % latexEscape(href))
        else:
            path, fragid = (href.split('#', 1) + [None])[:2]
            if path == '':
                path = self.filename
            else:
                path = os.path.join(os.path.dirname(self.filename), path)
            #if path == '':
            #path = os.path.basename(self.filename)
            #else:
            #    # Hack for linking to man pages from howtos, i.e.
            #    # ../doc/foo-man.html -> foo-man.html
            #    path = os.path.basename(path)

            path = realpath(path)

            if fragid:
                ref = path + 'HASH' + fragid
            else:
                ref = path
            self.writer('\\textit{')
            self.visitNodeDefault(node)
            self.writer('}')
            self.writer('\\loreref{%s}' % ref)

    def visitNode_a_name(self, node):
        self.writer('\\label{%sHASH%s}' %
                    (realpath(self.filename), node.getAttribute('name')))
        self.visitNodeDefault(node)

    def visitNode_table(self, node):
        rows = [[
            col for col in row.childNodes
            if getattr(col, 'tagName', None) in ('th', 'td')
        ] for row in node.childNodes if getattr(row, 'tagName', None) == 'tr']
        numCols = 1 + max([len(row) for row in rows])
        self.writer('\\begin{table}[ht]\\begin{center}')
        self.writer('\\begin{tabular}{@{}' + 'l' * numCols + '@{}}')
        for row in rows:
            th = 0
            for col in row:
                self.visitNode(col)
                self.writer('&')
                if col.tagName == 'th':
                    th = 1
            self.writer('\\\\\n')  #\\ ends lines
            if th:
                self.writer('\\hline\n')
        self.writer('\\end{tabular}\n')
        if node.hasAttribute('title'):
            self.writer('\\caption{%s}' %
                        latexEscape(node.getAttribute('title')))
        self.writer('\\end{center}\\end{table}\n')

    def visitNode_span_footnote(self, node):
        self.writer('\\footnote{')
        spitter = FootnoteLatexSpitter(self.writer, self.currDir,
                                       self.filename)
        spitter.visitNodeDefault(node)
        self.writer('}')

    def visitNode_span_index(self, node):
        self.writer('\\index{%s}\n' % node.getAttribute('value'))
        self.visitNodeDefault(node)

    visitNode_h2 = visitNode_h3 = visitNode_h4 = visitNodeHeader

    start_title = '\\title{'
    end_title = '}\n'

    start_sub = '$_{'
    end_sub = '}$'

    start_sup = '$^{'
    end_sup = '}$'

    start_html = '''\\documentclass{article}
    \\newcommand{\\loreref}[1]{%
    \\ifthenelse{\\value{page}=\\pageref{#1}}%
               { (this page)}%
               { (page \\pageref{#1})}%
    }'''

    start_body = '\\begin{document}\n\\maketitle\n'
    end_body = '\\end{document}'

    start_dl = '\\begin{description}\n'
    end_dl = '\\end{description}\n'
    start_ul = '\\begin{itemize}\n'
    end_ul = '\\end{itemize}\n'

    start_ol = '\\begin{enumerate}\n'
    end_ol = '\\end{enumerate}\n'

    start_li = '\\item '
    end_li = '\n'

    start_dt = '\\item['
    end_dt = ']'
    end_dd = '\n'

    start_p = '\n\n'

    start_strong = start_em = '\\begin{em}'
    end_strong = end_em = '\\end{em}'

    start_q = "``"
    end_q = "''"

    start_div_note = '\\begin{quotation}\\textbf{Note:}'
    end_div_note = '\\end{quotation}'

    start_th = '\\textbf{'
    end_th = '}'
Example #59
0
    def __init__(self, treetop, branch):
        self.treetop = treetop  # also is repository
        self.branch = branch
        self.exe = which(self.vcexe)[0]
Example #60
0
    Dependency,
    build_in_docker,
    DockerBuild,
    DockerRun,
    PACKAGE,
    PACKAGE_PYTHON,
    PACKAGE_CLI,
    PACKAGE_NODE,
    make_dependencies,
    LintPackage,
)
from ..release import rpm_version

FLOCKER_PATH = FilePath(__file__).parent().parent().parent()

require_fpm = skipIf(not which('fpm'), "Tests require the ``fpm`` command.")
require_rpm = skipIf(not which('rpm'), "Tests require the ``rpm`` command.")
require_rpmlint = skipIf(not which('rpmlint'),
                         "Tests require the ``rpmlint`` command.")
require_dpkg = skipIf(not which('dpkg'), "Tests require the ``dpkg`` command.")
require_lintian = skipIf(not which('lintian'),
                         "Tests require the ``lintian`` command.")

DOCKER_SOCK = '/var/run/docker.sock'


def assert_equal_steps(test_case, expected, actual):
    """
    Assert that the list of provided steps are the same.
    If they are not, display the differences intelligently.