Example #1
    def requestReceived(self, command, raw_pathname, params):
        path = compose_path(self.factory.root, raw_pathname)
        if command == b'git-upload-pack':
            subcmd = b'upload-pack'
        elif command == b'git-receive-pack':
            subcmd = b'receive-pack'
        else:
            self.die(b'Unsupported command in request')
            return

        cmd = b'git'
        args = [b'git', subcmd]
        if params.pop(b'turnip-stateless-rpc', None):
            args.append(b'--stateless-rpc')
        if params.pop(b'turnip-advertise-refs', None):
            args.append(b'--advertise-refs')
        args.append(path)

        env = {}
        if subcmd == b'receive-pack' and self.factory.hookrpc_handler:
            # This is a write operation, so prepare config, hooks, the hook
            # RPC server, and the environment variables that link them up.
            ensure_config(path)
            self.hookrpc_key = str(uuid.uuid4())
            self.factory.hookrpc_handler.registerKey(
                self.hookrpc_key, raw_pathname, [])
            ensure_hooks(path)
            env[b'TURNIP_HOOK_RPC_SOCK'] = self.factory.hookrpc_sock
            env[b'TURNIP_HOOK_RPC_KEY'] = self.hookrpc_key

        self.peer = GitProcessProtocol(self)
        reactor.spawnProcess(self.peer, cmd, args, env=env)
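GitProcessProtocol is defined elsewhere in turnip and is not shown here. As a rough, hypothetical sketch of the kind of ProcessProtocol such a spawnProcess call expects (the names and wiring below are illustrative assumptions, not turnip's actual implementation), a protocol that relays the pack stream between the network peer and the git child could look like:

from twisted.internet import protocol

class PackShuttleProtocol(protocol.ProcessProtocol):
    # Hypothetical sketch: relay bytes between a peer transport and git.
    def __init__(self, peer):
        self.peer = peer  # the server-side protocol that spawned us

    def outReceived(self, data):
        # Forward the child's stdout (the pack stream) back to the client.
        self.peer.transport.write(data)

    def processEnded(self, reason):
        self.peer.transport.loseConnection()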
Example #2
def shlaunchBackground(cmd, desc = None, progressFunc = None, endFunc = None):
    """
    Follow backup process

    The progressFunc in param can follow processus via stdin and stdout.
        - progressFunc is called each time datas are emmited on stdout
        - shlaunchBackground drop process after 60 seconds on inactivity

    @param cmd: the shell command to launch
    @type cmd: str
    @param desc: description in "background action" (optional)
    @type desc: str
    @param progressFunc: callback function to follow processus evolution.
        @see: progressBackup for an example
    @type progressFunc: function
    """
    logger = logging.getLogger()
    logger.info("support.mmctools.shlaunchBackground(\""+str(cmd)+"\")")
    shProcess = shSharedProcessProtocol(cmd)
    if desc == None:
        shProcess.desc = cmd
    else:
        shProcess.desc = desc

    ProcessScheduler().addProcess(shProcess.desc, shProcess)

    if progressFunc:
        shProcess.progressCalc = instancemethod(progressFunc, shProcess, shSharedProcessProtocol)

    if endFunc:
        shProcess.processEnded = instancemethod(endFunc, shProcess, shSharedProcessProtocol)
    reactor.spawnProcess(shProcess, "/bin/sh", ['/bin/sh','-c',cmd],env=os.environ)
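A minimal usage sketch for the function above. Note that progressFunc and endFunc are bound onto the spawned shSharedProcessProtocol instance with instancemethod, so they receive that protocol as self; the remaining signatures are assumptions, since the progressCalc call site is not shown here:

def onProgress(self, *args):
    pass  # inspect the protocol's buffered output here

def onEnd(self, reason):
    pass  # reason is the Failure/status passed to processEnded

shlaunchBackground("tar czf /tmp/etc.tgz /etc", desc="backup /etc",
                   progressFunc=onProgress, endFunc=onEnd)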
Example #3
    def test_stdin(self):
        """
        Making sure getPassword accepts a password from standard input by
        running a child process which uses getPassword to read in a string
        which it then writes out again.  Write a string to the child
        process and then read one and make sure it is the right string.
        """
        p = PasswordTestingProcessProtocol()
        p.finished = Deferred()
        reactor.spawnProcess(
            p,
            sys.executable,
            [sys.executable,
             '-c',
             ('import sys\n'
             'from twisted.python.util import getPassword\n'
              'sys.stdout.write(getPassword())\n'
              'sys.stdout.flush()\n')],
            env={'PYTHONPATH': os.pathsep.join(sys.path)})

        def processFinished((reason, output)):
            reason.trap(ProcessDone)
            self.assertIn((1, 'secret'), output)

        return p.finished.addCallback(processFinished)
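PasswordTestingProcessProtocol comes from Twisted's own test suite and is not shown above. A rough sketch of what such a protocol might look like, inferred from how the test uses it (output is a list of (fd, data) tuples and finished fires with (reason, output)):

from twisted.internet import protocol

class PasswordTestingProcessProtocol(protocol.ProcessProtocol):
    # Sketch only: feed the password to the child's stdin and record
    # everything the child writes back, tagged by file descriptor.
    finished = None  # the test attaches a Deferred here

    def connectionMade(self):
        self.output = []
        self.transport.write(b'secret\n')

    def childDataReceived(self, fd, data):
        self.output.append((fd, data))

    def processEnded(self, reason):
        self.finished.callback((reason, self.output))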
Example #4
 def buildProtocol(self, addr):
     now = time.time()
     if now - self.last_update > self.timeout: # primitive DoS prevention
         self.last_update = now
         # Note: this does not inherit *any* environment
         reactor.spawnProcess(GitPull(), 'git', args=['git', 'pull'])
     return protocol.ServerFactory.buildProtocol(self, addr)
Example #5
    def testManyProcesses(self):

        def _check(results, protocols):
            for p in protocols:
                self.assertEquals(p.stages, [1, 2, 3, 4, 5], "[%d] stages = %s" % (id(p.transport), str(p.stages)))
                # test status code
                f = p.reason
                f.trap(error.ProcessTerminated)
                self.assertEquals(f.value.exitCode, 23)

        exe = sys.executable
        scriptPath = util.sibpath(__file__, "process_tester.py")
        args = [exe, "-u", scriptPath]
        protocols = []
        deferreds = []

        for i in xrange(50):
            p = TestManyProcessProtocol()
            protocols.append(p)
            reactor.spawnProcess(p, exe, args, env=None)
            deferreds.append(p.deferred)

        deferredList = defer.DeferredList(deferreds, consumeErrors=True)
        deferredList.addCallback(_check, protocols)
        return deferredList
Example #6
    def sendCodeReview(self, project, revision, result):
        gerrit_version = self.getCachedVersion()
        if gerrit_version is None:
            self.callWithVersion(lambda: self.sendCodeReview(project, revision, result))
            return

        command = self._gerritCmd("review", "--project %s" % str(project))
        message = result.get('message', None)
        if message:
            command.append("--message '%s'" % message.replace("'", "\""))

        labels = result.get('labels', None)
        if labels:
            assert gerrit_version
            if gerrit_version < LooseVersion("2.6"):
                add_label = _old_add_label
            else:
                add_label = _new_add_label

            for label, value in labels.items():
                command.extend(add_label(label, value))

        command.append(str(revision))
        print command
        reactor.spawnProcess(self.LocalPP(self), command[0], command)
Example #7
    def deliverJob(self):
        # returns a Deferred that fires when the job has been delivered

        if self.connect == "ssh":
            tryhost = self.getopt("tryhost")
            tryuser = self.getopt("username")
            trydir = self.getopt("trydir")

            argv = ["ssh", "-l", tryuser, tryhost, "buildbot", "tryserver", "--jobdir", trydir]
            # now run this command and feed the contents of 'job' into stdin

            pp = RemoteTryPP(self.jobfile)
            reactor.spawnProcess(pp, argv[0], argv, os.environ)
            d = pp.d
            return d
        if self.connect == "pb":
            user = self.getopt("username")
            passwd = self.getopt("passwd")
            master = self.getopt("master")
            tryhost, tryport = master.split(":")
            tryport = int(tryport)
            f = pb.PBClientFactory()
            d = f.login(credentials.UsernamePassword(user, passwd))
            reactor.connectTCP(tryhost, tryport, f)
            d.addCallback(self._deliverJob_pb)
            return d
        raise RuntimeError("unknown connecttype '%s', should be 'ssh' or 'pb'" % self.connect)
Example #8
    def setUp(self):
        CFTPClientTestBase.setUp(self)

        self.startServer()
        cmds = ('-p %i -l testuser '
               '--known-hosts kh_test '
               '--user-authentications publickey '
               '--host-key-algorithms ssh-rsa '
               '-K direct '
               '-i dsa_test '
               '-a --nocache '
               '-v '
               '127.0.0.1')
        port = self.server.getHost().port
        cmds = test_conch._makeArgs((cmds % port).split(), mod='cftp')
        log.msg('running %s %s' % (sys.executable, cmds))
        self.processProtocol = SFTPTestProcess()

        env = os.environ.copy()
        env['PYTHONPATH'] = os.pathsep.join(sys.path)
        reactor.spawnProcess(self.processProtocol, sys.executable, cmds,
                             env=env)

        timeout = time.time() + 10
        while (not self.processProtocol.buffer) and (time.time() < timeout):
            reactor.iterate(0.1)
        if time.time() > timeout:
            self.skip = "couldn't start process"
        else:
            self.processProtocol.clearBuffer()
Example #9
    def run(self):
        started = defer.Deferred()
        self.serverStopped = defer.Deferred()
        self.processStopped = defer.Deferred()

        fact = protocol.Factory()
        fact.protocol = ivc.IVC4300Protocol
        fact.onConnectionMade = started
        fact.onConnectionLost = self.serverStopped

        proc = ivc.IVC4300Process(self.processStopped)

        executable = "C:/smacCapture/capture2.exe"
        path, bin = os.path.split(executable)

        PORT = 6544
        port = reactor.listenTCP(PORT, fact)
        reactor.spawnProcess(proc, executable, [bin], {}, path)

        self.protocol = yield started
        self.portStopped = defer.maybeDeferred(port.stopListening)
        self.portStopped.addCallback(lambda _: log.msg("Stopped listening"))
        yield self.protocol.start()

        print "Start recording session {0} (parent task is {1})".format(self.session.id, self.parent)
Example #10
 def run(self):
    self._removeIfExists(self.outfile)
    from twisted.internet import reactor
    reactor.spawnProcess(self,
                         SYSCMD_SQLITE3,
                         ['sqlite3',
                          self.outfile])
Example #11
    def testChildResolve(self):
        # I've seen problems with reactor.run under gtk2reactor. Spawn a
        # child which just does reactor.resolve after the reactor has
        # started, fail if it does not complete in a timely fashion.
        helperPath = os.path.abspath(self.mktemp())
        helperFile = open(helperPath, 'w')
        
        # Eeueuuggg
        reactorName = reactor.__module__

        helperFile.write(resolve_helper % {'reactor': reactorName})
        helperFile.close()

        env = os.environ.copy()
        env['PYTHONPATH'] = os.pathsep.join(sys.path)

        helperDeferred = Deferred()
        helperProto = ChildResolveProtocol(helperDeferred)

        reactor.spawnProcess(helperProto, sys.executable, ("python", "-u", helperPath), env)

        def cbFinished((reason, output, error)):
            # If the output is "done 127.0.0.1\n" we don't really care what
            # else happened.
            output = ''.join(output)
            if output != 'done 127.0.0.1\n':
                self.fail((
                    "The child process failed to produce the desired results:\n"
                    "   Reason for termination was: %r\n"
                    "   Output stream was: %r\n"
                    "   Error stream was: %r\n") % (reason.getErrorMessage(), output, ''.join(error)))

        helperDeferred.addCallback(cbFinished)
        return helperDeferred
Example #12
def get_status( hg_repo_dir, from_revision = None, to_revision = None ):
    
    hg_repo_dir = os.path.expanduser( os.path.expandvars(hg_repo_dir) )
    
    if not os.path.isdir( os.path.join(hg_repo_dir, '.hg') ):
        return defer.fail( Exception('Invalid HG Repository: %s' % hg_repo_dir) )
    
    
    rev_str = ''
    
    if from_revision:
        rev_str = '%s' % from_revision
        
    if to_revision:
        rev_str = rev_str + ':%s' % to_revision
    
    sp = HgStatusProto()
    
    args = [HG_PATH, 'status']
    
    if rev_str:
        args.append( '--rev' )
        args.append( rev_str )

    reactor.spawnProcess(sp, HG_PATH, args=args, env=None, path=hg_repo_dir)
 
    return sp.d
Example #13
    def start_canopen(self):

        # Start the CANOPEN server
        # usePTY is used to avoid the ECHO
        # INFO: stdbuf is used to avoid output buffering when not running in a terminal
        if self.config.isFake:
            reactor.spawnProcess(
                self.canopen,
                "/usr/bin/stdbuf",
                args=[
                    "stdbuf",
                    "--output=L",
                    "--input=0",
                    "/opt/spinitalia/alma3d_canopenshell",
                    "fake",
                    "load#libcanfestival_can_socket.so,0,1M,8",
                ],
                env=os.environ,
                usePTY=False,
            )
        else:
            reactor.spawnProcess(
                self.canopen,
                "/usr/bin/stdbuf",
                args=[
                    "stdbuf",
                    "--output=L",
                    "--input=0",
                    "/opt/spinitalia/alma3d_canopenshell",
                    "load#libcanfestival_can_socket.so,0,1M,8",
                ],
                env=os.environ,
                usePTY=False,
            )
Example #14
def run_server(fd=None, port=None, procs=None, verbose=False):
    if verbose:
        log.startLogging(stdout)
        environ['SOLEDAD_LOG_TO_STDOUT'] = '1'

    config = get_config()
    path = config["blobs_path"]
    if not port:
        port = int(config["blobs_port"])

    root = Resource()
    root.putChild('blobs', BlobsResource("filesystem", path))
    factory = Site(root)

    if fd is None:
        # Create a new listening port and several other
        # processes to help out.
        if procs is None:
            procs = cpu_count()
        log.msg('A total of %d processes will listen on port %d.' % (procs, port))
        port = reactor.listenTCP(port, factory)
        for i in range(procs - 1):
            reactor.spawnProcess(
                None, executable, [executable, __file__, str(port.fileno())],
                childFDs={0: 0, 1: 1, 2: 2, port.fileno(): port.fileno()},
                env=environ)
    else:
        # Another process created the port, just start listening on it.
        log.msg('Adopting file descriptor %d...' % fd)
        port = reactor.adoptStreamPort(fd, AF_INET, factory)

    reactor.run()
Example #15
    def execute(self, remoteCommand, process, sshArgs=''):
        """
        Connects to the SSH server started in L{ConchServerSetupMixin.setUp} by
        running the 'ssh' command line tool.

        @type remoteCommand: str
        @param remoteCommand: The command (with arguments) to run on the
        remote end.

        @type process: L{ConchTestOpenSSHProcess}

        @type sshArgs: str
        @param sshArgs: Arguments to pass to the 'ssh' process.

        @return: L{defer.Deferred}
        """
        process.deferred = defer.Deferred()
        cmdline = ('ssh -2 -l testuser -p %i '
                   '-oUserKnownHostsFile=kh_test '
                   '-oPasswordAuthentication=no '
                   # Always use the RSA key, since that's the one in kh_test.
                   '-oHostKeyAlgorithms=ssh-rsa '
                   '-a '
                   '-i dsa_test ') + sshArgs + \
                   ' 127.0.0.1 ' + remoteCommand
        port = self.conchServer.getHost().port
        cmds = (cmdline % port).split()
        reactor.spawnProcess(process, "ssh", cmds)
        return process.deferred
Example #16
    def test_noCompatibilityLayer(self):
        """
        If no compatibility layer is present, imports of gobject and friends
        are disallowed.

        We do this by running a process where we make sure gi.pygtkcompat
        isn't present.
        """
        from twisted.internet import reactor
        if not IReactorProcess.providedBy(reactor):
            raise SkipTest("No process support available in this reactor.")

        result = Deferred()
        class Stdout(ProcessProtocol):
            data = b""

            def errReceived(self, err):
                print(err)

            def outReceived(self, data):
                self.data += data

            def processExited(self, reason):
                result.callback(self.data)

        path = FilePath(__file__.encode("utf-8")).sibling(
            b"process_gireactornocompat.py").path
        reactor.spawnProcess(Stdout(), sys.executable, [sys.executable, path],
                             env=os.environ)
        result.addCallback(self.assertEqual, b"success")
        return result
Example #17
    def testCallBeforeStartupUnexecuted(self):
        progname = self.mktemp()
        progfile = open(progname, 'w')
        progfile.write(_callBeforeStartupProgram % {'reactor': reactor.__module__})
        progfile.close()

        def programFinished(result):
            (out, err, reason) = result
            if reason.check(error.ProcessTerminated):
                self.fail("Process did not exit cleanly (out: %s err: %s)" % (out, err))

            if err:
                log.msg("Unexpected output on standard error: %s" % (err,))
            self.failIf(out, "Expected no output, instead received:\n%s" % (out,))

        def programTimeout(err):
            err.trap(error.TimeoutError)
            proto.signalProcess('KILL')
            return err

        env = os.environ.copy()
        env['PYTHONPATH'] = os.pathsep.join(sys.path)
        d = defer.Deferred().addCallbacks(programFinished, programTimeout)
        proto = ThreadStartupProcessProtocol(d)
        reactor.spawnProcess(proto, sys.executable, ('python', progname), env)
        return d
Example #18
def logged_run_process(reactor, command):
    """
    Run a child process, and log the output as we get it.

    :param reactor: An ``IReactorProcess`` to spawn the process on.
    :param command: An argument list specifying the child process to run.

    :return: A ``Deferred`` that calls back with ``_ProcessResult`` if the
        process exited successfully, or errbacks with
        ``_CalledProcessError`` otherwise.
    """
    d = Deferred()
    action = TWISTED_CHILD_PROCESS_ACTION(command=command)
    with action.context():
        d2 = DeferredContext(d)
        protocol = _LoggingProcessProtocol(d, action)
        reactor.spawnProcess(protocol, command[0], command)

        def process_ended((reason, output)):
            status = reason.value.status
            if status:
                raise _CalledProcessError(
                    returncode=status, cmd=command, output=output)
            return _ProcessResult(
                command=command,
                status=status,
                output=output,
            )

        d2.addCallback(process_ended)
        d2.addActionFinish()
        return d2.result
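A hedged usage sketch for logged_run_process, assuming _ProcessResult exposes the output attribute built above:

def on_success(result):
    # result is a _ProcessResult with command, status and output attributes
    return result.output

d = logged_run_process(reactor, [b"git", b"describe", b"--always"])
d.addCallback(on_success)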
Example #19
 def guestmode(self, request, handler, action='status'):
     if 'admin' not in self._groups:
         return handler.failed(request) # TODO: Permission denied
     
     if action not in ['status', 'enable', 'disable']:
         return handler.failed(request) # TODO: Wrong params
     
     args = [action.encode('utf-8')]
     if action == 'enable':
         reactor.spawnProcess(
             protocol.ProcessProtocol(),
             '/usr/bin/pkexec',
             ['pkexec', '/usr/lib/jolicloud-daemon/utils/migrate-nm-connections'],
             env=os.environ
         )
     
     class GetProcessOutput(protocol.ProcessProtocol):
         out = ''
         def outReceived(self, data):
             self.out += data
         def errReceived(self, data):
             log.msg("[utils/guestmode] [stderr] %s" % data)
         def processEnded(self, status_object):
             if status_object.value.exitCode != 0:
                 return handler.failed(request)
             if action == 'status':
                 handler.send_data(request, self.out.strip())
             handler.success(request)
     reactor.spawnProcess(
         GetProcessOutput(),
         '/usr/bin/pkexec',
         ['pkexec', '/usr/lib/jolicloud-daemon/utils/guestmode'] + args,
         env=os.environ
     )
Example #20
    def startProcess(self, childClass, parentClass):
        from twisted.internet import reactor
        childClassPath = self._checkRoundTrip(childClass)
        prot = self.connectorFactory(parentClass())

        bootstrapPath = os.path.join(os.path.dirname(__file__), 'bootstrap.py')

        # Insert required modules into PYTHONPATH if they lie outside the
        # system import locations.
        env = os.environ.copy()
        pythonPath = []
        for pkg in self.packages:
            p = os.path.dirname(imp.find_module(pkg)[1])
            if (p.startswith(os.path.join(sys.prefix, 'lib'))
                    or p.startswith(os.path.join(sys.prefix, 'lib64'))):
                continue
            pythonPath.append(p)
        pythonPath.extend(env.get('PYTHONPATH', '').split(os.pathsep))
        env['PYTHONPATH'] = os.pathsep.join(pythonPath)

        args = (sys.executable, bootstrapPath, childClassPath)
        fds = {connector.TO_CHILD: 'w', connector.FROM_CHILD: 'r'}
        if self.debug:
            fds.update({0: 0, 1: 1, 2: 2})
        else:
            fds.update({0: 'w', 1: 'r', 2: 'r'})
        reactor.spawnProcess(prot, sys.executable, args, env, childFDs=fds)
        return prot
Example #21
 def run(self, command, finished=None, env={}, path=None, usePTY=0):
     d = defer.Deferred()
     d.addCallback(self.processEnded, command)
     self.processDirector = ProcessDirector(d, finished, self.timeout)
     self.processDirector.handleRead = self.handleRead
     reactor.spawnProcess(self.processDirector, command[0], command, env=env, path=path, usePTY=usePTY)
     return d
Example #22
def main():
    (parser, options, args) = parse_args()
    if len(args) == 0:
        parser.print_usage()
        sys.exit(1)
    if options.verbose:
        logger.setLevel(logging.DEBUG)
        logger.debug("verbose logging enabled.")

    # ---------------------------------------------------------------
    #   Terminate the Twisted reactor on a signal.
    # ---------------------------------------------------------------
    def signal_handler(signum, stackframe):
        reactor.callFromThread(reactor.stop)
    signal.signal(signal.SIGINT, signal_handler)
    # ---------------------------------------------------------------

    # ---------------------------------------------------------------
    #   Spawn the process specified when the reactor starts.
    #
    #   We want to run it up to the run count, and after the final
    #   iteration summarise the findings.
    # ---------------------------------------------------------------
    measure_protocol = MeasureProtocol(args = args)
    reactor.spawnProcess(measure_protocol, args[0], args)
    # ---------------------------------------------------------------

    reactor.run()
Example #23
 def connectionMade(self):
     from twisted.internet import reactor
     log.msg("launch a new process on each new connection")
     self.pp = ProcessProtocol()
     self.pp.factory = self
     reactor.spawnProcess(self.pp, sys.executable,
                          [sys.executable, '-u', 'wait_for_makey.py'])
Example #24
 def launch_player(self, test=False):
     if self.playercmd_args is not None:
         self.player_args = [self.player_path] + self.playercmd_args.split()
     for proc in psutil.process_iter():
         if proc.name() == self.player:
             log.msg("Player process found", loglevel=logging.DEBUG)
             self._managed = False
             self.extpid = proc.pid
             self.juststarted = False
             reactor.callWhenRunning(self.connect)  # @UndefinedVariable
             if test:
                 if self._errors > 5:
                     try:
                         self.protocol.shutdown()
                     except:
                         proc.kill()
                     return False
                 else:
                     self._errors += 1
                     return True
             return None
     if test:
         return False
     self._managed = True
     try:
         reactor.spawnProcess(  # @UndefinedVariable
             PlayerProcess(self), self.player_path, self.player_args, env=os.environ
         )
     except:
         log.err("Program unknown : %s" % self.player_args)
Example #25
 def execute(self, remoteCommand, process, sshArgs=""):
     """
     As for L{OpenSSHClientTestCase.execute}, except it runs the 'conch'
     command line tool, not 'ssh'.
     """
     process.deferred = defer.Deferred()
     port = self.conchServer.getHost().port
     cmd = (
         (
             "-p %i -l testuser "
             "--known-hosts kh_test "
             "--user-authentications publickey "
             "--host-key-algorithms ssh-rsa "
             "-a "
             "-i dsa_test "
             "-v "
         )
         % port
         + sshArgs
         + " 127.0.0.1 "
         + remoteCommand
     )
     cmds = _makeArgs(cmd.split())
     log.msg(str(cmds))
     env = os.environ.copy()
     env["PYTHONPATH"] = os.pathsep.join(sys.path)
     reactor.spawnProcess(process, sys.executable, cmds, env=env)
     return process.deferred
Example #26
    def execCommand( self, protocol, cmd ):
        c = cmd.split()

        if c[0] == 'scp':
            reactor.spawnProcess( 
                protocol, 
                'scp', ['scp', '-t', '-d', 'file' ] )
Example #27
    def sendCodeReview(self, project, revision, message=None, verified=0, reviewed=0):
        gerrit_version = self.getCachedVersion()
        if (verified or reviewed) and gerrit_version is None:
            self.callWithVersion(lambda: self.sendCodeReview(project, revision, message, verified, reviewed))
            return

        command = self._gerritCmd("review", "--project %s" % str(project))
        if message:
            command.append("--message '%s'" % message.replace("'", "\""))

        if verified:
            assert(gerrit_version)
            if gerrit_version < LooseVersion("2.6"):
                command.extend(["--verified %d" % int(verified)])
            else:
                command.extend(["--label Verified=%d" % int(verified)])

        if reviewed:
            assert(gerrit_version)
            if gerrit_version < LooseVersion("2.6"):
                command.extend(["--code-review %d" % int(reviewed)])
            else:
                command.extend(["--label Code-Review=%d" % int(reviewed)])

        command.append(str(revision))
        print command
        reactor.spawnProcess(self.LocalPP(self), command[0], command)
Example #28
 def _task_reinstall(self, packages=[]):
     if packages:
         reactor.spawnProcess(
             self,
             '/usr/bin/apt-get',
             ['apt-get', '-y', '--force-yes', '-f', '--purge', '--reinstall', 'install'] + map(str, packages), env=None
         )
Example #29
    def runCommand(self, command, error=False,
        script="calendarserver_command_gateway"):
        """
        Run the given command by feeding it as standard input to
        calendarserver_command_gateway in a subprocess.
        """

        if isinstance(command, unicode):
            command = command.encode("utf-8")

        sourceRoot = os.path.dirname(os.path.dirname(os.path.dirname(os.path.dirname(__file__))))
        python = sys.executable
        script = os.path.join(sourceRoot, "bin", script)

        args = [python, script, "-f", self.configFileName]
        if error:
            args.append("--error")

        cwd = sourceRoot

        deferred = Deferred()
        reactor.spawnProcess(CapturingProcessProtocol(deferred, command), python, args, env=os.environ, path=cwd)
        output = yield deferred
        try:
            plist = readPlistFromString(output)
        except xml.parsers.expat.ExpatError, e:
            print("Error (%s) parsing (%s)" % (e, output))
            raise
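CapturingProcessProtocol is part of CalendarServer and is not shown here. A rough, assumed sketch consistent with how runCommand uses it (write the command to the child's stdin, buffer stdout, and fire the deferred with the collected output once the child exits):

from twisted.internet import protocol

class CapturingProcessProtocol(protocol.ProcessProtocol):
    # Illustrative approximation, not the real CalendarServer class.
    def __init__(self, deferred, inputData):
        self.deferred = deferred
        self.inputData = inputData
        self.output = []

    def connectionMade(self):
        if self.inputData is not None:
            self.transport.write(self.inputData)
        self.transport.closeStdin()

    def outReceived(self, data):
        self.output.append(data)

    def processEnded(self, reason):
        self.deferred.callback("".join(self.output))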
Example #30
 def dataReceived(self, data):
    #if VERBOSE: print "Received",repr(data)
    if self.state == 0 and data == "Hopper":
       self.state = 1
       data = data[6:]
       self.transport.write(struct.pack("<H", HOPPER_GDB_PROTOCOL_VERSION))
    if self.state == 1 and len(data) >= 2:
       self.state = 2
       remote_version, = struct.unpack("<H", data[:2])
       data = data[2:]
       if remote_version != HOPPER_GDB_PROTOCOL_VERSION:
          if VERBOSE: print "Unsupported version",remote_version
          self.transport.loseConnection()
          return
    if self.state == 2 and len(data) > 0:
       self.gdb_arch = data.strip("\x00")
       self.state = 3
       data = ""
    if self.state == 3:
       if sys.platform == "darwin":
          launch_args = ["gdb", "--arch=%s"%self.gdb_arch, "--quiet", "--nx", "--interpreter=mi1"]
       else:
          launch_args = ["gdb", "--quiet", "--nx", "--interpreter=mi1"]
       if self.override_args and len(self.override_args):
          launch_args.extend(["--args", self.override_file]+self.override_args)
       elif self.override_file:
          launch_args.append(self.override_file)         
       if VERBOSE: print "Launch:",str(launch_args)
       self.gdb = GDBProtocol(self)
       reactor.spawnProcess(self.gdb, "/usr/bin/gdb", args=launch_args)
    if self.state == 4 and self.gdb != None:
       if VERBOSE: print "WRITE TO GDB",repr(data)
       data = self.modifyCommand(data)
       self.gdb.transport.write(data)
Example #31
    def run(self, test_case=None, start_deferred=None):
        """Execute a SIPp scenario

        Execute the SIPp scenario that was passed to this object

        Keyword Arguments:
        test_case   If not None, the scenario will automatically evaluate its
                    pass/fail status at the end of the run. In the event of a
                    failure, it will fail the test case and call stop_reactor.

        Returns:
        A deferred that can be used to determine when the SIPp Scenario
        has exited.
        """
        def __scenario_callback(result):
            """Callback called when a scenario completes"""
            self.exited = True
            self.result = result
            if (result.exitcode == 0):
                self.passed = True
                LOGGER.info("SIPp Scenario %s Exited" %
                            (self.scenario['scenario']))
            else:
                LOGGER.warning("SIPp Scenario %s Failed [%d]" %
                               (self.scenario['scenario'], result.exitcode))
            self._our_exit_deferred.callback(self)
            return result

        def __evaluate_scenario_results(result):
            """Convenience function. If the test case is injected into this
            method, then auto-fail the test if the scenario fails. """
            if not self.passed:
                LOGGER.warning("SIPp Scenario %s Failed" %
                               self.scenario['scenario'])
                self._test_case.passed = False
                self._test_case.stop_reactor()
            return result

        self.result = None
        sipp_args = [
            self.sipp,
            self.target,
            '-sf',
            '%s/sipp/%s' % (self.test_dir, self.scenario['scenario']),
            '-nostdin',
            '-skip_rlimit',
        ]

        default_args = {
            '-p': str(self.default_port),
            '-m': '1',
            '-i': '127.0.0.1',
            '-timeout': '20s'
        }

        # Override and extend defaults
        default_args.update(self.scenario)
        del default_args['scenario']

        # correct the path specified by -slave_cfg
        if '-slave_cfg' in default_args:
            default_args['-slave_cfg'] = (
                '%s/sipp/%s' % (self.test_dir, default_args['-slave_cfg']))

        if '-inf' in default_args:
            default_args['-inf'] = ('%s/sipp/%s' %
                                    (self.test_dir, default_args['-inf']))

        if '-oocsf' in default_args:
            default_args['-oocsf'] = ('%s/sipp/%s' %
                                      (self.test_dir, default_args['-oocsf']))

        if '-mp' not in default_args:
            # Current SIPp correctly chooses an available port for audio, but
            # unfortunately it then attempts to bind to the audio port + n for
            # things like rtcp and video without first checking if those other
            # ports are unused (https://github.com/SIPp/sipp/issues/276).
            #
            # So as a work around, if not given, we'll specify the media port
            # ourselves, and make sure all associated ports are available.
            #
            # num = 4 = ports for audio rtp/rtcp and video rtp/rtcp
            default_args['-mp'] = str(
                get_available_port(config=default_args, num=4))

        for (key, val) in default_args.items():
            sipp_args.extend([key, val])

        # The majority of tests do not need re-transmissions enabled. As a
        # matter of fact most tests will fail if sipp re-transmits a message.
        # By default disable all re-transmissions in a scenario unless
        # explicitly told to allow them.
        if '-enable-retrans' in self.positional_args:
            sipp_args.extend(
                [i for i in self.positional_args if i != '-enable-retrans'])
        else:
            sipp_args.append('-nr')
            sipp_args.extend(self.positional_args)

        LOGGER.info("Executing SIPp scenario: %s" % self.scenario['scenario'])
        LOGGER.debug(sipp_args)

        self._our_exit_deferred = defer.Deferred()

        exit_deferred = defer.Deferred()
        exit_deferred.addCallback(__scenario_callback)
        if test_case:
            self._test_case = test_case
            exit_deferred.addCallback(__evaluate_scenario_results)

        self._process = SIPpProtocol(self.scenario['scenario'], exit_deferred,
                                     start_deferred)
        reactor.spawnProcess(self._process, sipp_args[0], sipp_args, {
            "TERM": "vt100",
        }, None, None)
        return self._our_exit_deferred
Example #32
def main():
    usage = '%prog [options] (CMD CMDARGS|-|filename)'
    description = 'Command line interaction with a VNC server'

    op = VNCDoToolOptionParser(usage=usage, description=description)
    op.disable_interspersed_args()

    op.add_option('-d', '--display', action='store', metavar='DISPLAY',
        type='int', default=0,
        help='connect to vnc server display :DISPLAY [%default]')

    op.add_option('-p', '--password', action='store', metavar='PASSWORD',
        help='use password to access server')

    op.add_option('-s', '--server', action='store', metavar='ADDRESS',
        default='127.0.0.1',
        help='connect to vnc server at ADDRESS[:PORT] [%default]')

    op.add_option('--delay', action='store', metavar='MILLISECONDS',
        default=os.environ.get('VNCDOTOOL_DELAY', 0), type='int',
        help='delay MILLISECONDS between actions [%defaultms]')

    op.add_option('-v', '--verbose', action='store_true')

    op.add_option('--viewer', action='store', metavar='CMD',
        default='/usr/bin/vncviewer',
        help='Use CMD to launch viewer in session mode [%default]')

    options, args = op.parse_args()
    if not len(args):
        op.error('no command provided')

    try:
        options.host, options.port = options.server.split(':')
    except ValueError:
        options.host = options.server
        options.port = options.display + 5900
    options.port = int(options.port)

    if 'record' in args:
        args.pop(0)
        port = int(args.pop(0))
        output = args.pop(0)
        factory = build_proxy(options, port)
        if output == '-':
            factory.logger = sys.stdout.write
        else:
            factory.logger = open(output, 'w').write
    elif 'service' in args:
        args.pop(0)
        port = int(args.pop(0))
        factory = build_proxy(options, port)
        factory.logger = None
    elif 'viewer' in args:
        args.pop(0)
        output = args.pop(0)
        port = find_free_port()
        factory = build_proxy(options, port)
        if output == '-':
            factory.logger = sys.stdout.write
        else:
            factory.logger = open(output, 'w').write

        cmd = '%s localhost::%s' % (options.viewer, port)
        proc = reactor.spawnProcess(ExitingProcess(),
                                    options.viewer, cmd.split(),
                                    env=os.environ)
    else:
        factory = build_tool(options, args)

    if options.password:
        factory.password = options.password

    if options.verbose:
        log.msg('connecting to %s:%s' % (options.host, options.port))
        factory.logger = log.msg
        log.startLogging(sys.stdout)

    reactor.run()

    sys.exit(reactor.exit_status)
Example #33
 def test__propagates_exit_errors(self):
     proto = JSONPerLineProtocol(callback=lambda json: None)
     reactor.spawnProcess(proto, b"false", (b"false", ))
     with ExpectedException(ProcessTerminated, ".* exit code 1"):
         yield proto.done
Example #34
 def spawnProcess(self, *arg, **kw):
     reactor.spawnProcess(*arg, **kw)
Example #35
 def runProcess(self, env, request, qargs = []):
    p = CGIProcessProtocol(request)
    from twisted.internet import reactor
    reactor.spawnProcess(p, self.filter, [self.filter, self.filename], env, os.path.dirname(self.filename))
Example #36
 def _spawn_ais(self, game):
     logging.debug(game.name)
     excludes = set()
     for game3 in self.games:
         if not game3.over:
             for player in game3.players:
                 player_id = self.results.get_player_id(player.player_info)
                 excludes.add(player_id)
     for game_name, waiting_ais in self.game_to_waiting_ais.iteritems():
         game2 = self.name_to_game(game_name)
         if game2 and not game2.over and game2 != game:
             for ainame in waiting_ais:
                 player_id = int(ainame[2:])
                 excludes.add(player_id)
     num_ais = game.min_players - game.num_players
     logging.debug("%s min_players %d num_players %d num_ais_needed %s",
                   game.name, game.min_players, game.num_players, num_ais)
     logging.debug("%s excludes %s", game.name, sorted(excludes))
     ainames = []
     for unused in xrange(num_ais):
         player_id = self.results.get_weighted_random_player_id(
             excludes=excludes, highest_mu=game.any_humans)
         excludes.add(player_id)
         ainame = "ai%d" % player_id
         ainames.append(ainame)
     for ainame in ainames:
         if self._passwd_for_playername(ainame) is None:
             self._add_playername_with_random_password(ainame)
     logging.debug("%s ainames %s", game.name, ainames)
     # Add all AIs to the wait list first, to avoid a race.
     self.game_to_waiting_ais[game.name] = set(ainames)
     if hasattr(sys, "frozen"):
         # TODO Find the absolute path.
         executable = "slugathon.exe"
     else:
         executable = sys.executable
     logdir = os.path.join(TEMPDIR, "slugathon")
     if not os.path.exists(logdir):
         os.makedirs(logdir)
     for ainame in ainames:
         pp = AIProcessProtocol(self, game.name, ainame)
         args = [executable]
         if hasattr(sys, "frozen"):
             args.extend(["ai"])
         else:
             args.extend(["-m", "slugathon.ai.AIClient"])
         args.extend([
             "--playername",
             ainame,
             "--port",
             str(self.port),
             "--game-name",
             game.name,
             "--log-path",
             os.path.join(logdir,
                          "slugathon-%s-%s.log" % (game.name, ainame)),
             "--ai-time-limit",
             str(game.ai_time_limit),
         ])
         if not self.no_passwd:
             aipass = self._passwd_for_playername(ainame)
             if aipass is None:
                 logging.warning(
                     "user %s is not in %s; ai will fail to join" %
                     (ainame, self.passwd_path))
             else:
                 args.extend(["--password", aipass])
         logging.info("spawning AI process for %s %s", game, ainame)
         reactor.spawnProcess(pp, executable, args=args, env=os.environ)
Example #37
 def runProcess(self, env, request, qargs=[]):
     p = CGIProcessProtocol(request)
     reactor.spawnProcess(p, self.filter, [self.filter, self.filename]+qargs, env, os.path.dirname(self.filename))
Example #38
 def open_console(self):
     term = settings.get("term")
     args = [term, "-e", abspath_vde(self.term_command), self.console()]
     get_args = lambda: " ".join(args)
     logger.info(open_console, name=self.name, args=get_args)
     reactor.spawnProcess(TermProtocol(), term, args, os.environ)
        print "outReceived called with %d bytes of data:\n%s" % (len(data),
                                                                 data)

    def errReceived(self, data):
        print "errReceived called with %d bytes of data:\n%s" % (len(data),
                                                                 data)

    def inConnectionLost(self):
        print "inConnectionLost called, stdin closed."

    def outConnectionLost(self):
        print "outConnectionLost called, stdout closed."

    def errConnectionLost(self):
        print "errConnectionLost called, stderr closed."

    def processExited(self, reason):
        print "processExited called with status %d" % (reason.value.exitCode, )

    def processEnded(self, reason):
        print "processEnded called with status %d" % (reason.value.exitCode, )
        print "All FDs are now closed, and the process has been reaped."
        reactor.stop()


pp = EchoProcessProtocol()

commandAndArgs = ["twistd", "-ny", "echo_server.tac"]
reactor.spawnProcess(pp, commandAndArgs[0], args=commandAndArgs)
reactor.run()
Example #40
    def inConnectionLost(self):
        print("inConnectionLost! stdin is closed! (we probably did it)")

    def outConnectionLost(self):
        print("outConnectionLost! The child closed their stdout!")
        # now is the time to examine what they wrote
        # print("I saw them write:", self.data)
        # (dummy, lines, words, chars, file) = re.split(r'\s+', self.data)
        print("I saw %s lines" % self.lines)

    def errConnectionLost(self):
        print("errConnectionLost! The child closed their stderr.")

    def processExited(self, reason):
        print("processExited, status %d" % (reason.value.exitCode, ))

    def processEnded(self, reason):
        print("processEnded, status %d" % (reason.value.exitCode, ))
        print("quitting")
        reactor.stop()


if __name__ == "__main__":
    #yolov5 = Yolov5(10)
    #reactor.spawnProcess(yolov5, "detect.py", ["detect.py", "--source", "test.mp4"], {})
    serpent = SerpentAI("wow", "SerpentwowGameAgent")
    reactor.spawnProcess(serpent, "game.py",
                         ["game.py", "play", "wow", "SerpentwowGameAgent"], {})
    reactor.run()
Example #41
    def start_client(self):
        # this returns a Deferred that fires with the client's control.furl
        log.msg("MAKING CLIENT")
        # self.testdir is an absolute Unicode path
        clientdir = self.clientdir = os.path.join(self.testdir, u"client")
        clientdir_str = clientdir.encode(get_filesystem_encoding())
        quiet = StringIO()
        create_node.create_node({'basedir': clientdir}, out=quiet)
        log.msg("DONE MAKING CLIENT")
        # now replace tahoe.cfg
        # set webport=0 and then ask the node what port it picked.
        f = open(os.path.join(clientdir, "tahoe.cfg"), "w")
        f.write("[node]\n"
                "web.port = tcp:0:interface=127.0.0.1\n"
                "[client]\n"
                "introducer.furl = %s\n"
                "shares.happy = 1\n"
                "[storage]\n" % (self.introducer_furl, ))

        if self.mode in ("upload-self", "receive"):
            # accept and store shares, to trigger the memory consumption bugs
            pass
        else:
            # don't accept any shares
            f.write("readonly = true\n")
            ## also, if we do receive any shares, throw them away
            #f.write("debug_discard = true")
        if self.mode == "upload-self":
            pass
        f.close()
        self.keepalive_file = os.path.join(clientdir,
                                           client.Client.EXIT_TRIGGER_FILE)
        # now start updating the mtime.
        self.touch_keepalive()
        ts = internet.TimerService(1.0, self.touch_keepalive)
        ts.setServiceParent(self.sparent)

        pp = ClientWatcher()
        self.proc_done = pp.d = defer.Deferred()
        logfile = os.path.join(self.basedir, "client.log")
        tahoes = procutils.which("tahoe")
        if not tahoes:
            raise RuntimeError("unable to find a 'tahoe' executable")
        cmd = [tahoes[0], "run", ".", "-l", logfile]
        env = os.environ.copy()
        self.proc = reactor.spawnProcess(pp,
                                         cmd[0],
                                         cmd,
                                         env,
                                         path=clientdir_str)
        log.msg("CLIENT STARTED")

        # now we wait for the client to get started. we're looking for the
        # control.furl file to appear.
        furl_file = os.path.join(clientdir, "private", "control.furl")
        url_file = os.path.join(clientdir, "node.url")

        def _check():
            if pp.ended and pp.ended.value.status != 0:
                # the twistd process ends normally (with rc=0) if the child
                # is successfully launched. It ends abnormally (with rc!=0)
                # if the child cannot be launched.
                raise ChildDidNotStartError(
                    "process ended while waiting for startup")
            return os.path.exists(furl_file)

        d = self.poll(_check, 0.1)

        # once it exists, wait a moment before we read from it, just in case
        # it hasn't finished writing the whole thing. Ideally control.furl
        # would be created in some atomic fashion, or made non-readable until
        # it's ready, but I can't think of an easy way to do that, and I
        # think the chances that we'll observe a half-write are pretty low.
        def _stall(res):
            d2 = defer.Deferred()
            reactor.callLater(0.1, d2.callback, None)
            return d2

        d.addCallback(_stall)

        def _read(res):
            # read the node's URL
            self.webish_url = open(url_file, "r").read().strip()
            if self.webish_url[-1] == "/":
                # trim trailing slash, since the rest of the code wants it gone
                self.webish_url = self.webish_url[:-1]
            f = open(furl_file, "r")
            furl = f.read()
            return furl.strip()

        d.addCallback(_read)
        return d
Example #42
def runProcess(cmdArgs,
               stdoutf=None,
               stderrf=None,
               expected=None,
               initialText=None,
               addEnv=None,
               newEnv=None,
               workingDir=None,
               uid=None,
               gid=None,
               log=False):
    """
    The only required argument is cmdArgs.  cmdArgs is a list of strings; cmdArgs[0] must be the executable.
    stdoutf and stderrf are functions that will be called with chunks of the child's stdout and stderr
    data.  There is no guarantee the data will be line terminated.
    expected is a list of integers that are valid exit codes for the application.
    initialText is any text to be sent to the program before closing its stdin.
    addEnv allows one to add keys to the current application's environment.
    newEnv specifies a totally new environment to run the child under.  The current application's
    environment is the default value.
    workingDir is the directory in which to run the child process.
    uid and gid are the numeric user id and group id to run the program as.

    This returns a deferred which will be fired on program exit.
    """

    cmdArgs = [str(c) for c in cmdArgs]

    if newEnv is None:
        newEnv = dict(os.environ)

    if addEnv:
        newEnv = func.updateDict(newEnv, addEnv)

    if expected is None:
        expected = [0]

    pp = NonInteractiveProcessProtocol(stdoutf=stdoutf,
                                       stderrf=stderrf,
                                       expected=expected,
                                       initialText=initialText)

    kwargs = {}
    if workingDir:
        kwargs['path'] = workingDir
    if uid:
        kwargs['uid'] = uid
    if gid:
        kwargs['gid'] = gid

    if log:
        logger.msg('Running command: ' + ' '.join(cmdArgs))

    reactor.spawnProcess(pp,
                         executable=cmdArgs[0],
                         args=cmdArgs,
                         env=newEnv,
                         **kwargs)

    def _error(_):
        raise ProgramRunError(cmdArgs)

    pp.deferred.addErrback(_error)
    return pp.deferred
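A short usage sketch based on the docstring above (the ls arguments here are just an example):

chunks = []
d = runProcess(['ls', '-l', '/tmp'],
               stdoutf=chunks.append,
               expected=[0],
               log=True)
d.addCallback(lambda _: ''.join(chunks))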
Example #43
    def errReceived(self, data):
        print("errReceived!", data)


if __name__ == "__main__":
    # multiprocessing.freeze_support()
    pp = MyPP()
    # command = ['screen', '-x']
    command = ['bash']

    # does this work in WINDOWS?
    def theFunc(a):
        a.run()

    reactor.spawnProcess(pp,
                         command[0],
                         command, {'TERM': 'xterm'},
                         usePTY=True)
    # print("{MIDDLE}")
    p = threading.Thread(target=theFunc, args=(reactor, ))
    # print("{AHEAD}")
    # somehow.
    # all dead here. not even better than JS.
    p.start()  # not RUN!
    # what the heck?
    # with TIMESTAMP.
    # print("{OF}")
    ik = 5
    # not working here.
    while ik > 0:
        pp.write(b"ls\n")
        print("[HELLO WORLD]")
Example #44
    def deliverJob(self):
        # returns a Deferred that fires when the job has been delivered
        if self.connect == "ssh":
            tryhost = self.getopt("host")
            tryport = self.getopt("port")
            tryuser = self.getopt("username")
            trydir = self.getopt("jobdir")
            buildbotbin = self.getopt("buildbotbin")
            ssh_command = self.getopt("ssh")
            if not ssh_command:
                ssh_commands = which("ssh")
                if not ssh_commands:
                    raise RuntimeError(
                        "couldn't find ssh executable, make sure "
                        "it is available in the PATH")

                argv = [ssh_commands[0]]
            else:
                # Split the string on whitespace to allow passing options in
                # ssh command too, but preserving whitespace inside quotes to
                # allow using paths with spaces in them which is common under
                # Windows. And because Windows uses backslashes in paths, we
                # can't just use shlex.split there as it would interpret them
                # specially, so do it by hand.
                if runtime.platformType == 'win32':
                    # Note that regex here matches the arguments, not the
                    # separators, as it's simpler to do it like this. And then we
                    # just need to get all of them together using the slice and
                    # also remove the quotes from those that were quoted.
                    argv = [
                        string.strip(a, '"') for a in re.split(
                            r'''([^" ]+|"[^"]+")''', ssh_command)[1::2]
                    ]
                else:
                    # Do use standard tokenization logic under POSIX.
                    argv = shlex.split(ssh_command)

            if tryuser:
                argv += ["-l", tryuser]

            if tryport:
                argv += ["-p", tryport]

            argv += [tryhost, buildbotbin, "tryserver", "--jobdir", trydir]
            pp = RemoteTryPP(self.jobfile)
            reactor.spawnProcess(pp, argv[0], argv, os.environ)
            d = pp.d
            return d
        if self.connect == "pb":
            user = self.getopt("username")
            passwd = self.getopt("passwd")
            master = self.getopt("master")
            tryhost, tryport = master.split(":")
            tryport = int(tryport)
            f = pb.PBClientFactory()
            d = f.login(
                credentials.UsernamePassword(unicode2bytes(user),
                                             unicode2bytes(passwd)))
            reactor.connectTCP(tryhost, tryport, f)
            d.addCallback(self._deliverJob_pb)
            return d
        raise RuntimeError("unknown connecttype '{}', "
                           "should be 'ssh' or 'pb'".format(self.connect))
Example #45
 def __init__(self, mode='433', input_rate=0, context=None):
     assert input_rate > 0
     assert context is not None
     gr.hier_block2.__init__(
         self, type(self).__name__,
         gr.io_signature(1, 1, gr.sizeof_gr_complex),
         gr.io_signature(0, 0, 0))
     
     # The input bandwidth chosen is not primarily determined by the bandwidth of the input signals, but by the frequency error of the transmitters. Therefore it is not too critical, and we can choose the exact rate to make the filtering easy.
     if input_rate <= upper_preferred_demod_rate:
         # Skip having a filter at all.
         self.__band_filter = None
         demod_rate = input_rate
     else:
         # TODO: This gunk is very similar to the stuff that MultistageChannelFilter does. See if we can share some code.
         lower_rate = input_rate
         lower_rate_prev = None
         while lower_rate > upper_preferred_demod_rate and lower_rate != lower_rate_prev:
             lower_rate_prev = lower_rate
             if lower_rate % 5 == 0 and lower_rate > upper_preferred_demod_rate * 3:
                 lower_rate /= 5
             elif lower_rate % 2 == 0:
                 lower_rate /= 2
             else:
                 # non-integer ratio
                 lower_rate = upper_preferred_demod_rate
                 break
         demod_rate = lower_rate
         
         self.__band_filter = MultistageChannelFilter(
             input_rate=input_rate,
             output_rate=demod_rate,
             cutoff_freq=demod_rate * 0.4,
             transition_width=demod_rate * 0.2)
     
     # Subprocess
     # using /usr/bin/env because twisted spawnProcess doesn't support path search
     # pylint: disable=no-member
     process = the_reactor.spawnProcess(
         RTL433ProcessProtocol(context.output_message),
         '/usr/bin/env',
         env=None,  # inherit environment
         args=['env', 'rtl_433',
             '-F', 'json',
             '-r', '-',  # read from stdin
             '-m', '3',  # complex float input
             '-s', str(demod_rate),
         ],
         childFDs={
             0: 'w',
             1: 'r',
             2: 2
         })
     sink = make_sink_to_process_stdin(process, itemsize=gr.sizeof_gr_complex)
     
     agc = analog.agc2_cc(reference=dB(-4))
     agc.set_attack_rate(200 / demod_rate)
     agc.set_decay_rate(200 / demod_rate)
     
     if self.__band_filter:
         self.connect(
             self,
             self.__band_filter,
             agc)
     else:
         self.connect(
             self,
             agc)
     self.connect(agc, sink)
Example #46
def standalone():
    """
    Initializes Tornado and our application.  Forks worker processes to handle
    requests.  Does not return until all child processes exit normally.
    """
    # Hack to work-around issue with Cyclone and UNIX domain sockets
    twisted.internet.address.UNIXAddress.host = "localhost"

    # Parse arguments
    parser = argparse.ArgumentParser(description="Crest web server")
    parser.add_argument("--background",
                        action="store_true",
                        help="Detach and run server in background")
    parser.add_argument("--signaling-namespace",
                        action="store_true",
                        help="Server running in signaling namespace")
    parser.add_argument("--worker-processes", default=1, type=int)
    parser.add_argument("--shared-http-tcp-fd", default=None, type=int)
    parser.add_argument("--process-id", default=0, type=int)
    parser.add_argument("--log-level", default=2, type=int)
    args = parser.parse_args()

    # Set process name.
    prctl.prctl(prctl.NAME, settings.PROCESS_NAME)

    # We don't initialize logging until we fork because we want each child to
    # have its own logging and it's awkward to reconfigure logging that is
    # defined by the parent.
    application = create_application()

    if args.background:
        # Get a new logfile, rotating the old one if present.
        err_log_name = os.path.join(settings.LOGS_DIR,
                                    settings.LOG_FILE_PREFIX + "-err.log")
        try:
            os.rename(err_log_name, err_log_name + ".old")
        except OSError:
            pass
        # Fork into background.
        utils.daemonize(err_log_name)

    utils.install_sigusr1_handler(settings.LOG_FILE_PREFIX)

    # Setup logging
    syslog.openlog(settings.LOG_FILE_PREFIX, syslog.LOG_PID)

    logging_config.configure_logging(
        utils.map_clearwater_log_level(args.log_level), settings.LOGS_DIR,
        settings.LOG_FILE_PREFIX, args.process_id)

    twisted.python.log.addObserver(on_twisted_log)

    pdlogs.CREST_STARTING.log()

    # setup accumulators and counters for statistics gathering
    api.base.setupStats(args.process_id, args.worker_processes)

    # Initialize reactor ports and create worker sub-processes
    if args.process_id == 0:
        # Main process startup, create pidfile.

        # We must keep a reference to the file object here, as this keeps
        # the file locked and provides extra protection against two processes running at
        # once.
        pidfile_lock = None
        try:
            pidfile_lock = utils.lock_and_write_pid_file(
                settings.PID_FILE)  # noqa
        except IOError:
            # We failed to take the lock - another process is already running
            exit(1)

        # Create UNIX domain socket for nginx front-end (used for
        # normal operation and as a bridge from the default namespace to the signaling
        # namespace in a multiple interface configuration).
        bind_safely(reactor, args.process_id, application)
        pdlogs.CREST_UP.log()

        if args.signaling_namespace and settings.PROCESS_NAME == "homer":
            # Running in signaling namespace as Homer, create TCP socket for XDMS requests
            # from signaling interface
            _log.info("Going to listen for HTTP on TCP port %s",
                      settings.HTTP_PORT)
            http_tcp_port = reactor.listenTCP(settings.HTTP_PORT,
                                              application,
                                              interface=settings.LOCAL_IP)

            # Spin up worker sub-processes, passing TCP file descriptor
            for process_id in range(1, args.worker_processes):
                reactor.spawnProcess(
                    None,
                    executable, [
                        executable, __file__, "--shared-http-tcp-fd",
                        str(http_tcp_port.fileno()), "--process-id",
                        str(process_id)
                    ],
                    childFDs={
                        0: 0,
                        1: 1,
                        2: 2,
                        http_tcp_port.fileno(): http_tcp_port.fileno()
                    },
                    env=os.environ)
        else:
            # Spin up worker sub-processes
            for process_id in range(1, args.worker_processes):
                reactor.spawnProcess(
                    None,
                    executable,
                    [executable, __file__, "--process-id",
                     str(process_id)],
                    childFDs={
                        0: 0,
                        1: 1,
                        2: 2
                    },
                    env=os.environ)
    else:
        # Sub-process startup, ensure we die if our parent does.
        prctl.prctl(prctl.PDEATHSIG, signal.SIGTERM)

        # Create UNIX domain socket for nginx front-end based on process ID.
        bind_safely(reactor, args.process_id, application)

        # Create TCP socket if file descriptor was passed.
        if args.shared_http_tcp_fd:
            reactor.adoptStreamPort(args.shared_http_tcp_fd, AF_INET,
                                    application)

    # We need to catch the shutdown request so that we can properly stop
    # the ZMQ interface; otherwise the reactor won't shut down on a SIGTERM
    # and will be SIGKILLed when the service is stopped.
    reactor.addSystemEventTrigger('before', 'shutdown', on_before_shutdown)

    # Kick off the reactor to start listening on configured ports
    reactor.run()
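A condensed sketch of the fd-sharing pattern standalone() relies on: the parent passes its listening socket's descriptor through childFDs and the worker re-adopts it with adoptStreamPort. The inline child code and the twisted.web site are placeholders, not part of crest:

# Sketch (assumption): share one listening TCP socket between parent and child.
import os
import sys
from socket import AF_INET
from twisted.internet import protocol, reactor
from twisted.web.server import Site
from twisted.web.static import Data

site = Site(Data(b'hello from parent-owned socket\n', 'text/plain'))
port = reactor.listenTCP(8080, site, interface='127.0.0.1')
fd = port.fileno()

child_code = (
    'import sys\n'
    'from socket import AF_INET\n'
    'from twisted.internet import reactor\n'
    'from twisted.web.server import Site\n'
    'from twisted.web.static import Data\n'
    'site = Site(Data(b"hello from child\\n", "text/plain"))\n'
    'reactor.adoptStreamPort(int(sys.argv[1]), AF_INET, site)\n'
    'reactor.run()\n')

reactor.spawnProcess(
    protocol.ProcessProtocol(), sys.executable,
    [sys.executable, '-c', child_code, str(fd)],
    childFDs={0: 0, 1: 1, 2: 2, fd: fd},  # keep the listening fd open in the child
    env=os.environ)
reactor.run()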
Example #47
0
    def test_put(self):
        State.tenant_cache[1].anonymize_outgoing_connections = False

        # Add a file to the tmp dir
        with open('./robots.txt', 'w') as f:
            f.write("User-agent: *\n" + "Allow: /\n" +
                    "Sitemap: http://localhost/sitemap.xml")

        # Start the HTTP server proxy requests will be forwarded to.
        self.pp = helpers.SimpleServerPP()

        # An extended SimpleHTTPServer to handle the addition of the globaleaks header
        e = ""+\
        "from SimpleHTTPServer import SimpleHTTPRequestHandler as rH; "+\
        "from SimpleHTTPServer import test as t; "+\
        "of = rH.end_headers; rH.end_headers = lambda s: s.send_header('Server', 'GlobaLeaks') or of(s); "+\
        "t(HandlerClass=rH)"

        yield reactor.spawnProcess(self.pp,
                                   'python',
                                   args=['python', '-c', e, '43434'],
                                   usePTY=True)

        yield self.pp.start_defer

        handler = self.request(
            {
                'operation': 'verify_hostname',
                'args': {
                    'value': 'antani.gov'
                }
            },
            role='admin')
        yield self.assertFailure(handler.put(), errors.ExternalResourceError)

        handler = self.request(
            {
                'operation': 'verify_hostname',
                'args': {
                    'value': 'localhost:43434'
                }
            },
            role='admin')
        yield handler.put()

        for value in [
                '', 'onion', 'localhost', 'antani.onion', 'antani.localhost'
        ]:
            handler = self.request(
                {
                    'operation': 'set_hostname',
                    'args': {
                        'value': value
                    }
                },
                role='admin')
            yield self.assertFailure(handler.put(),
                                     errors.InputValidationError)

        handler = self.request(
            {
                'operation': 'set_hostname',
                'args': {
                    'value': 'antani.gov'
                }
            },
            role='admin')
        yield handler.put()
Example #48
0
            return deferred

        p = yadtshell.twisted.YadtProcessProtocol(component,
                                                  cmd,
                                                  self.pi,
                                                  out_log_level=out_log_level,
                                                  err_log_level=err_log_level,
                                                  log_prefix=re.sub(
                                                      '^.*://', '',
                                                      component.uri))
        p.target_state = target_state
        p.state = yadtshell.settings.UNKNOWN

        cmdline = shlex.split(cmdline)
        self.logger.debug('cmd: %s' % cmdline)
        reactor.spawnProcess(p, cmdline[0], cmdline, None)
        return p.deferred

    def log_host_finished(self, action):
        self.logger.info(
            yadtshell.settings.term.render(
                '    ${BOLD}%(uri)s finished successfully${NORMAL}' %
                vars(action)))

    def next_with_preconditions(self, queue):
        for task in queue:
            action = task.action
            if not isinstance(action, yadtshell.actions.ActionPlan):
                if action.state != yadtshell.actions.State.PENDING:
                    continue
                if not action.are_all_preconditions_met(self.components):
Example #49
0
    def run_update(self):
        """Run update script.

        Returns a Deferred which either errbacks with an update-related
        exception (delivered to the caller's errback), or fires with None when
        the update completes normally.
        """
        def _script_timeout():
            _log.warning('_script_timeout()')
            self.script_timeout_call = None

            # this is not nice, but what else to do here?
            if self._update_process_protocol is not None:
                self._update_process_protocol.sendTerm()
            self.update_failed = True
            self.update_exit_code = 3  # XXX: fake update exit code to cause UpdateFailedError
            self.stop_twisted()

        def _update_completed(res):
            _log.debug('_update_completed()')
            if self.script_timeout_call is not None:
                self.script_timeout_call.cancel()
                self.script_timeout_call = None

            # XXX: In a script timeout case, we get here with 'res' not being
            # an integer.  This is not nice, but causes no actual problems.

            self.update_exit_code = int(res)  # store exit code
            _log.info('update exit code: %s' % self.update_exit_code)
            return None

        _log.info('update needed, starting update process')

        # export configuration before update from sqlite so that new code after
        # update has the option of re-creating the sqlite database or switch to
        # a new backend format without resorting to ugly sqlite dependencies
        try:
            _log.info('exporting rdf/xml for update')
            self._export_rdfxml_for_update()
            _log.info('export rdf/xml for update successful')
        except:
            _log.exception('_export_rdfxml_for_update() failed, ignoring')
            try:
                if os.path.exists(constants.UPDATE_PROCESS_RDFXML_EXPORT_FILE):
                    os.unlink(constants.UPDATE_PROCESS_RDFXML_EXPORT_FILE)
            except:
                _log.exception('_export_rdfxml_for_update(), cleanup failed')

        # set sources.list
        helpers.write_file('/etc/apt/sources.list', self.sources)

        # set repository keys
        helpers.write_file(constants.UPDATE_REPOSITORY_KEYS_FILE,
                           self.repokeys,
                           perms=0600,
                           append=False)

        # determine parameters for update
        cmd = constants.CMD_PYTHON
        if self.scriptspath is not None:
            pyfile = os.path.join(
                self.scriptspath,
                os.path.basename(constants.CMD_L2TPGW_UPDATE_PRODUCT))
        else:
            pyfile = constants.CMD_L2TPGW_UPDATE_PRODUCT
        _log.info('update command: %s, script: %s, arguments: %s' %
                  (cmd, pyfile, self.importpath))

        # failure timer for running script
        self.script_timeout_call = reactor.callLater(
            constants.UPDATE_SCRIPT_TIMEOUT, _script_timeout)

        # start update process
        u = UpdateProcessProtocol()
        self._update_process_protocol = u
        reactor.spawnProcess(
            u,
            executable=cmd,
            args=[cmd, pyfile, '--import-path', self.importpath],
            env=None,  # Uses os.environ if set to None, default is empty
            usePTY=1)
        d = u.waitCompleted()
        d.addCallback(_update_completed)
        self.run_update_deferred = d
        return d
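run_update() above depends on UpdateProcessProtocol.waitCompleted() returning a Deferred that fires with the child's exit code, but that class is not shown. A minimal sketch of how such a protocol could look; the class name and details here are assumptions, not the original implementation:

# Sketch (assumption): a protocol whose waitCompleted() Deferred fires with
# the child's exit code, and which can be asked to terminate a hung child.
from twisted.internet import defer, protocol, reactor

class ExitCodeProtocol(protocol.ProcessProtocol):
    def __init__(self):
        self._done = defer.Deferred()

    def waitCompleted(self):
        return self._done

    def sendTerm(self):
        # used by a timeout path to abort a hung child
        self.transport.signalProcess('TERM')

    def processEnded(self, reason):
        # reason.value.exitCode is 0 for ProcessDone, the exit status for
        # ProcessTerminated, and None if the child was killed by a signal
        self._done.callback(reason.value.exitCode)

# usage sketch:
# p = ExitCodeProtocol()
# reactor.spawnProcess(p, '/bin/sh', ['/bin/sh', '-c', 'exit 3'], env=None)
# p.waitCompleted().addCallback(lambda code: print('exit code', code))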
Example #50
0
 def _start(self):
     """Stuff to be done after all outlets have started but before the inlet is notified"""
     self.log.debug('Spawning new FFMpeg process')
     reactor.spawnProcess(self.protocol, self.ffmpegbin, self.fargs)
Example #51
0
 def list_(self, request, handler):
     # PackageKit is buggy, it crashes when doing a GetPackages:
     # (jerem: ~) pkcon get-packages 
     # Getting packages              [=========================]         
     # Loading cache                 [=========================]         
     # Querying                      [                       ==]         The daemon crashed mid-transaction!
     # (jerem: ~/Downloads/PackageKit-0.6.10/src) sudo ./packagekitd --verbose
     # 10:30:12	PackageKit          auto-setting status based on info available
     # 10:30:12	PackageKit          emit package available, loadwatch;1.0+1.1alpha1-5;i386;maverick, Run a program using only idle cycles
     # terminate called after throwing an instance of 'std::logic_error'
     #   what():  basic_string::_S_construct NULL not valid
     # Aborted
     # (jerem: ~/Downloads/PackageKit-0.6.10/src)
     #        res = []
     #        def get_package(i, p_id, summary):
     #            name = p_id.split(';')[0]
     #            log.msg('[%s]' % name)
     #            res.append({'name': name})
     #        def finished(exit, runtime):
     #            log.msg('List Finished [%s] [%s]' % (exit, runtime))
     #            handler.send_data(request, res)
     #            if exit == 'success':
     #                handler.send_meta(OPERATION_SUCCESSFUL, request=request)
     #            else:
     #                handler.send_meta(OPERATION_FAILED, request=request)
     #        t = Transaction(request, handler)
     #        t._s_Package = get_package
     #        t._s_Finished = finished
     #        t.run('GetPackages', 'installed')
     if self._refresh_cache_needed == True:
         self._silent_refresh_cache(partial(self.list_, request, handler))
         return
     _silent_remove = self._silent_remove
     class DpkgGetSelecions(protocol.ProcessProtocol):
         out = ''
         
         def outReceived(self, data):
             self.out += data
         
         def errReceived(self, data):
             log.msg("[DpkgGetSelecions] [stderr] %s" % data)
         
         def processEnded(self, status_object):
             res = []
             webapps_to_be_deleted = []
             for p in self.out.split('\n'):
                 p = p.strip()
                 if len(p):
                     p, status = p.split(':')
                     if status.startswith('install'):
                         if p.startswith('jolicloud-webapp-'):
                             webapps_to_be_deleted.append(p)
                         else:
                             res.append({'name': p})
             # We add webapps and remove the legacy packages
             dst_path = '%s/.local/share/icons' % os.getenv('HOME')
             if not os.path.exists(dst_path):
                 os.makedirs(dst_path)
             if len(webapps_to_be_deleted):
                 log.msg('Deleting legacy webapps packages: %s' % webapps_to_be_deleted)
                 for webapp in webapps_to_be_deleted:
                     src = '/usr/share/pixmaps/%s.png' % webapp
                     dst = '%s/.local/share/icons/%s.png' % (os.getenv('HOME'), webapp)
                     log.msg('Copying icon %s to %s.' % (src, dst))
                     shutil.copy(src, dst)
                 _silent_remove(webapps_to_be_deleted)
             for icon in os.listdir(dst_path):
                 if icon.startswith('jolicloud-webapp-'):
                     res.append({'name': icon.split('.')[0]})
             handler.send_data(request, res)
             handler.send_meta(OPERATION_SUCCESSFUL, request=request)
             log.msg("[DpkgGetSelecions] [processEnded] status = %d" % status_object.value.exitCode)
     reactor.spawnProcess(
         DpkgGetSelecions(),
         '/usr/bin/dpkg-query',
         ['dpkg-query', '-W', '--showformat=${Package}:${Status}\n']
     )
Example #52
0
    def start(self):
        # render properties
        command = self.command
        # set up argv
        if isinstance(command, (str, bytes)):
            if runtime.platformType == 'win32':
                # allow %COMSPEC% to have args
                argv = os.environ['COMSPEC'].split()
                if '/c' not in argv:
                    argv += ['/c']
                argv += [command]
            else:
                # for posix, use /bin/sh. for other non-posix, well, doesn't
                # hurt to try
                argv = ['/bin/sh', '-c', command]
        else:
            if runtime.platformType == 'win32':
                # allow %COMSPEC% to have args
                argv = os.environ['COMSPEC'].split()
                if '/c' not in argv:
                    argv += ['/c']
                argv += list(command)
            else:
                argv = command

        self.stdio_log = stdio_log = self.addLog("stdio")

        if isinstance(command, str):
            stdio_log.addHeader(command.strip() + "\n\n")
        else:
            stdio_log.addHeader(" ".join(command) + "\n\n")
        stdio_log.addHeader("** RUNNING ON BUILDMASTER **\n")
        stdio_log.addHeader(" in dir %s\n" % os.getcwd())
        stdio_log.addHeader(" argv: %s\n" % (argv,))
        self.step_status.setText(self.describe())

        if self.env is None:
            env = os.environ
        else:
            assert isinstance(self.env, dict)
            env = self.env
            for key, v in iteritems(self.env):
                if isinstance(v, list):
                    # Need to do os.pathsep translation.  We could either do that
                    # by replacing all incoming ':'s with os.pathsep, or by
                    # accepting lists.  I like lists better.
                    # If it's not a string, treat it as a sequence to be
                    # turned into a string.
                    self.env[key] = os.pathsep.join(self.env[key])

            # do substitution on variable values matching pattern: ${name}
            p = re.compile(r'\${([0-9a-zA-Z_]*)}')

            def subst(match):
                return os.environ.get(match.group(1), "")
            newenv = {}
            for key, v in iteritems(env):
                if v is not None:
                    if not isinstance(v, basestring):
                        raise RuntimeError("'env' values must be strings or "
                                           "lists; key '%s' is incorrect" % (key,))
                    newenv[key] = p.sub(subst, env[key])
            env = newenv
        stdio_log.addHeader(" env: %r\n" % (env,))

        # TODO add a timeout?
        self.process = reactor.spawnProcess(self.LocalPP(self), argv[0], argv,
                                            path=self.masterWorkdir, usePTY=self.usePTY, env=env)
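For reference, the ${name} expansion buried in the step above can be pulled out into a small standalone helper. A sketch; expand_env is an illustrative name, not part of buildbot:

# Sketch (assumption): expand ${NAME} references in env values from os.environ,
# dropping keys whose value is None, as the step above does.
import os
import re

_VAR = re.compile(r'\${([0-9a-zA-Z_]*)}')

def expand_env(env):
    """Return a copy of env with ${NAME} replaced from os.environ ('' if unset)."""
    def subst(match):
        return os.environ.get(match.group(1), "")
    return {k: _VAR.sub(subst, v) for k, v in env.items() if v is not None}

# e.g. expand_env({'PATH': '/opt/tools/bin:${PATH}'})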
Example #53
0
    def spawn(self, patterns):  # pylint: disable=too-many-branches
        systype = util.get_systype()
        server = self.debug_options.get("server")
        if not server:
            return None
        server = self.apply_patterns(server, patterns)
        server_executable = server["executable"]
        if not server_executable:
            return None
        if server["cwd"]:
            server_executable = join(server["cwd"], server_executable)
        if ("windows" in systype and not server_executable.endswith(".exe")
                and isfile(server_executable + ".exe")):
            server_executable = server_executable + ".exe"

        if not isfile(server_executable):
            server_executable = where_is_program(server_executable)
        if not isfile(server_executable):
            raise DebugInvalidOptionsError(
                "\nCould not launch Debug Server '%s'. Please check that it "
                "is installed and is included in a system PATH\n\n"
                "See documentation or contact [email protected]:\n"
                "https://docs.platformio.org/page/plus/debugging.html\n" %
                server_executable)

        self._debug_port = ":3333"
        openocd_pipe_allowed = all(
            [not self.debug_options["port"], "openocd" in server_executable])
        if openocd_pipe_allowed:
            args = []
            if server["cwd"]:
                args.extend(["-s", server["cwd"]])
            args.extend([
                "-c", "gdb_port pipe; tcl_port disabled; telnet_port disabled"
            ])
            args.extend(server["arguments"])
            str_args = " ".join(
                [arg if arg.startswith("-") else '"%s"' % arg for arg in args])
            self._debug_port = '| "%s" %s' % (server_executable, str_args)
            self._debug_port = fs.to_unix_path(self._debug_port)
        else:
            env = os.environ.copy()
            # prepend server "lib" folder to LD path
            if ("windows" not in systype and server["cwd"]
                    and isdir(join(server["cwd"], "lib"))):
                ld_key = ("DYLD_LIBRARY_PATH"
                          if "darwin" in systype else "LD_LIBRARY_PATH")
                env[ld_key] = join(server["cwd"], "lib")
                if os.environ.get(ld_key):
                    env[ld_key] = "%s:%s" % (env[ld_key],
                                             os.environ.get(ld_key))
            # prepend BIN to PATH
            if server["cwd"] and isdir(join(server["cwd"], "bin")):
                env["PATH"] = "%s%s%s" % (
                    join(server["cwd"], "bin"),
                    os.pathsep,
                    os.environ.get("PATH", os.environ.get("Path", "")),
                )

            self._transport = reactor.spawnProcess(
                self,
                server_executable,
                [server_executable] + server["arguments"],
                path=server["cwd"],
                env=env,
            )
            if "mspdebug" in server_executable.lower():
                self._debug_port = ":2000"
            elif "jlink" in server_executable.lower():
                self._debug_port = ":2331"
            elif "qemu" in server_executable.lower():
                self._debug_port = ":1234"

        return self._transport
Example #54
0
 def spawn(self, *args):
     self.d = defer.Deferred()
     from twisted.internet import reactor
     reactor.spawnProcess(self, sys.executable,
                          [sys.executable] + list(args), os.environ)
     return self.d
Example #55
0
#!/usr/bin/python
# -*- coding: utf-8 -*-

import irc, bot
from twisted.internet import reactor#, protocol
import sys

irc_server = 'chat.freenode.net'
irc_server_port = 6666
irc_nick = 'bot'
irc_channel = '#kingdomplantar'

pp = bot.MyPP()
reactor.spawnProcess(pp, "java", ["java", "-Xmx1024M", "-Xms1024M", "-jar", "minecraft_server.jar", "nogui"], {'LANG':'ru_RU.UTF-8'})
#reactor.run()
print sys.getdefaultencoding()
#chat_module = chat.chat(irc_channel)

irc_module = irc.IrcBotFactory(irc_channel, irc_nick, pp)
pp.irc_proto = irc_module

#delayer_module = delayer.delayer(irc_module, None)

#icq_module = icq.oscarAuth(chat_module, icq_uin, icq_pass, icq_server, icq_server_port, delayer_module)

#chat_module.delayer = delayer_module
reactor.connectTCP(irc_server, irc_server_port, irc_module)
reactor.run()
Example #56
0
 def open(self):
     t = self.transport
     self._protocol = LPRProtocol(t, t.protocol)
     self.printer = reactor.spawnProcess(self._protocol, 'lpr',
                                         ['lpr', '-P', t.config.printer])
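spawnProcess returns the process transport, which is presumably what self.printer is kept around for: writing the job to lpr's stdin. A minimal standalone sketch of that write-then-close pattern; PrintProtocol and the 'office' queue name are illustrative:

# Sketch (assumption): feed data to a child's stdin via the transport returned
# by spawnProcess, then close stdin so the child sees EOF.
from twisted.internet import protocol, reactor

class PrintProtocol(protocol.ProcessProtocol):
    def processEnded(self, reason):
        print('lpr finished: %s' % (reason.value,))

printer = reactor.spawnProcess(PrintProtocol(), 'lpr', ['lpr', '-P', 'office'])
printer.write(b'hello, printer\n')  # goes to the child's stdin
printer.closeStdin()                # EOF; lpr starts the job
reactor.run()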
Example #57
0
def run():
    """ Runs the Peekaboo daemon. """
    arg_parser = ArgumentParser(
        description=
        'Peekaboo Extended Email Attachment Behavior Observation Owl')
    arg_parser.add_argument('-c',
                            '--config',
                            action='store',
                            required=False,
                            default=os.path.join('./peekaboo.conf'),
                            help='The configuration file for Peekaboo.')
    arg_parser.add_argument(
        '-d',
        '--debug',
        action='store_true',
        required=False,
        default=False,
        help=
        "Run Peekaboo in debug mode regardless of what's specified in the configuration."
    )
    arg_parser.add_argument(
        '-D',
        '--daemon',
        action='store_true',
        required=False,
        default=False,
        help=
        'Run Peekaboo in daemon mode (suppresses the logo to be written to STDOUT).'
    )
    args = arg_parser.parse_args()

    if not args.daemon:
        print(_owl)
    else:
        print('Starting Peekaboo %s.' % __version__)

    # read configuration
    if not os.path.isfile(args.config):
        print('Failed to read config, file does not exist.')  # logger doesn't exist here
        sys.exit(1)
    config = parse_config(args.config)

    # Check if CLI arguments override the configuration
    if args.debug:
        config.change_log_level('DEBUG')

    # Log the configuration options if we are in debug mode
    if config.log_level == logging.DEBUG:
        logger.debug(config.__str__())

    # establish a connection to the database
    try:
        db_con = PeekabooDatabase(config.db_url)
        config.add_db_con(db_con)
    except PeekabooDatabaseError as e:
        logging.exception(e)
        sys.exit(1)
    except Exception as e:
        logger.critical('Failed to establish a connection to the database.')
        logger.exception(e)
        sys.exit(1)

    # Import debug module if we are in debug mode
    if config.use_debug_module:
        from peekaboo.debug import peekaboo_debugger
        peekaboo_debugger()

    if os.getuid() == 0:
        logger.warning('Peekaboo should not run as root.')
        # drop privileges to user
        os.setgid(grp.getgrnam(config.group)[2])
        os.setuid(pwd.getpwnam(config.user)[2])
        # set $HOME to the users home directory
        # (VirtualBox must access the configs)
        os.environ['HOME'] = pwd.getpwnam(config.user)[5]
        logger.info("Dropped privileges to user %s and group %s" %
                    (config.user, config.group))
        logger.debug('$HOME is ' + os.environ['HOME'])

    # write PID file
    pid = str(os.getpid())
    with open(config.pid_file, "w") as pidfile:
        pidfile.write("%s\n" % pid)

    systemd = SystemdNotifier()
    server = PeekabooStreamServer(config.sock_file,
                                  PeekabooStreamRequestHandler)
    runner = Thread(target=server.serve_forever)
    runner.daemon = True

    try:
        runner.start()
        logger.info('Peekaboo server is listening on %s' %
                    server.server_address)

        os.chmod(
            config.sock_file, stat.S_IWOTH | stat.S_IREAD | stat.S_IWRITE
            | stat.S_IRGRP | stat.S_IWGRP | stat.S_IWOTH)

        # Run Cuckoo sandbox, parse log output, and report back to Peekaboo.
        # If this dies Peekaboo dies, since this is the main thread.
        srv = CuckooServer()
        reactor.spawnProcess(srv, config.interpreter,
                             [config.interpreter, '-u', config.cuckoo_exec])
        systemd.notify("READY=1")
        reactor.run()
    except Exception as e:
        logger.exception(e)
    finally:
        server.shutdown()
Example #58
0
    def outReceived(self, data):
        self.outdata += data
        lines = self.outdata.split('\n')
        for line in lines[:-1]:
            self.outLineReceived(line)

        self.outdata = lines[-1]

    def errReceived(self, data):
        self.errdata += data
        lines = self.errdata.split('\n')
        for line in lines[:-1]:
            self.errLineReceived(line)

        self.errdata = lines[-1]

    def dataReceived(self, data):
        self.outReceived(data)


def spawnNonDaemonProcess(reactor, protocol, executable, args):
    proc = reactor.spawnProcess(protocol, executable, args)
    reactor.addSystemEventTrigger('before', 'shutdown',
                                  lambda: proc.signalProcess('TERM'))


if __name__ == "__main__":
    from twisted.internet import reactor
    proc = reactor.spawnProcess(TerminalEchoProcessProtocol(), 'nc',
                                ['nc', '-l', '1234'])
    reactor.run()
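One caveat with the shutdown trigger in spawnNonDaemonProcess above: signalProcess raises ProcessExitedAlready if the child has already gone away by the time the reactor stops. A tolerant variant, as a sketch:

# Sketch (assumption): same pattern, but ignore an already-exited child.
from twisted.internet.error import ProcessExitedAlready

def spawnNonDaemonProcess(reactor, protocol, executable, args):
    proc = reactor.spawnProcess(protocol, executable, args)

    def _terminate():
        try:
            proc.signalProcess('TERM')
        except ProcessExitedAlready:
            pass  # nothing left to kill

    reactor.addSystemEventTrigger('before', 'shutdown', _terminate)
    return proc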
Example #59
0
    def callWithVersion(self, func):
        command = self._gerritCmd("version")
        callback = lambda gerrit_version: self.processVersion(gerrit_version, func)

        reactor.spawnProcess(self.VersionPP(callback), command[0], command)
Example #60
0
                error(inst)
                defer.returnValue(None)

        try:
            temp_fd, temp_file = mkstemp(suffix=".pcap", dir=dir)
            os.chmod(temp_file, 0666)
        except OSError, inst:
            error(inst)
            defer.returnValue(None)

        self._proc = _TcpdumpProtocol()
        debug("spawnProcess: %s" % cmd)
        reactor.spawnProcess(self._proc,
                             self._daemon,
                             args=cmd,
                             path='/',
                             childFDs={
                                 1: temp_fd,
                                 2: "r"
                             })
        os.close(temp_fd)

        success, status, stderr = yield self._proc.deferred()

        if not success:
            error("Tcpdump failed (exit status: %s):" % status)
            error(stderr)
            os.unlink(temp_file)
            defer.returnValue(None)
        else:
            info("Tcpdump started.")
            defer.returnValue(temp_file)
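The childFDs mapping above sends tcpdump's stdout straight into the pcap temp file while the protocol only ever sees stderr. A self-contained sketch of that routing; LogStderrProtocol and the shell command are illustrative, not from the original:

# Sketch (assumption): route a child's stdout directly to a file descriptor
# while the parent's protocol receives only its stderr.  An integer value dups
# that parent fd into the child; 'r' means the parent reads that fd via a pipe.
import os
from twisted.internet import protocol, reactor

class LogStderrProtocol(protocol.ProcessProtocol):
    def errReceived(self, data):
        print('stderr: %r' % (data,))

out_fd = os.open('/tmp/child-stdout.log', os.O_WRONLY | os.O_CREAT, 0o600)
reactor.spawnProcess(
    LogStderrProtocol(), '/bin/sh',
    ['/bin/sh', '-c', 'echo to-file; echo to-parent >&2'],
    env=None,
    childFDs={0: 'w', 1: out_fd, 2: 'r'})
os.close(out_fd)  # the child keeps its own duplicate
reactor.run()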