Example #1
 def test_include_of_single_file(self):
     with open("test1.txt", "w") as f:
         f.write("hello world")
     filedata = pack_include_files(["test1.txt"])
     os.makedirs("outdir")
     os.chdir("outdir")
     unpack_include_files(filedata)
     self.assertEqual(os.listdir("."), ["test1.txt"])
Example #2
 def test_include_of_single_file_with_explicit_location(self):
     os.makedirs("indir")
     os.makedirs("outdir")
     with open("indir/test1.txt", "w") as f:
         f.write("hello world")
     filedata = pack_include_files(["*.txt"], "./indir")
     unpack_include_files(filedata, "./outdir")
     self.assertEqual(os.listdir("outdir"), ["test1.txt"])
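The two examples above exercise pack_include_files and unpack_include_files without showing how they are implemented. Below is a minimal sketch of how such a pair of helpers could work, assuming the file data travels as a gzipped, base64-encoded tar archive and that the optional second argument (here called `location`, a name chosen for illustration) is the directory the glob patterns are resolved against. The real loads helpers may be implemented differently; this is only meant to make the contract of the tests concrete.

# Illustrative sketch only -- not the actual loads implementation.
# Assumption: "filedata" is a base64-encoded gzipped tarball of the matched
# files, which also makes the mode and directory handling exercised in the
# later examples fall out of tarfile's behaviour.
import base64
import glob
import io
import os
import tarfile


def pack_include_files(includes, location='.'):
    """Expand each glob pattern relative to `location` and tar up the matches."""
    buf = io.BytesIO()
    cwd = os.getcwd()
    os.chdir(location)
    try:
        with tarfile.open(fileobj=buf, mode='w:gz') as tar:
            for pattern in includes:
                for path in glob.glob(pattern):
                    # Store entries under their basename so they land directly
                    # in the unpack directory; directories are added recursively.
                    tar.add(path, arcname=os.path.basename(path.rstrip('/')))
    finally:
        os.chdir(cwd)
    return base64.b64encode(buf.getvalue())


def unpack_include_files(filedata, location='.'):
    """Recreate the packed files, permission bits included, under `location`."""
    buf = io.BytesIO(base64.b64decode(filedata))
    with tarfile.open(fileobj=buf, mode='r:gz') as tar:
        tar.extractall(location)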
Example #3
 def test_preservation_of_file_mode(self):
     with open("test1.sh", "w") as f:
         f.write("#!/bin/sh\necho 'hello world'\n")
     os.chmod("test1.sh", 0755)
     with open("private.txt", "w") as f:
         f.write("TOP SECRET DATA\n")
     os.chmod("private.txt", 0600)
     filedata = pack_include_files(["*.*"])
     os.unlink("test1.sh")
     os.unlink("private.txt")
     unpack_include_files(filedata)
     self.assertEquals(os.stat("test1.sh").st_mode & 0777, 0755)
     self.assertEquals(os.stat("private.txt").st_mode & 0777, 0600)
Example #4
 def test_relative_globbing_and_directory_includes(self):
     os.makedirs("indir")
     os.makedirs("outdir")
     os.chdir("indir")
     with open("test1.txt", "w") as f:
         f.write("hello world")
     with open("test2.txt", "w") as f:
         f.write("hello world")
     os.makedirs("subdir/subsubdir")
     os.chdir("subdir/subsubdir")
     with open("test3.txt", "w") as f:
         f.write("hello world")
     os.chdir("../../../outdir")
     filedata = pack_include_files(["../indir/*.txt", "../indir/*dir"])
     unpack_include_files(filedata)
     self.assertEqual(sorted(os.listdir(".")),
                      ["subdir", "test1.txt", "test2.txt"])
     self.assertEqual(os.listdir("./subdir"), ["subsubdir"])
     self.assertEqual(os.listdir("./subdir/subsubdir"), ["test3.txt"])
Example #5
    def _prepare_filesystem(self):
        test_dir = self.args.get('test_dir')

        # in standalone mode we take care of creating
        # the files
        if test_dir is not None:
            if not self.slave:
                test_dir = test_dir + '-%d' % os.getpid()

                if not os.path.exists(test_dir):
                    os.makedirs(test_dir)

                # Copy over the include files, if any.
                # It's inefficient to package them up and then immediately
                # unpackage them, but this has the advantage of ensuring
                # consistency with how it's done in the distributed case.
                includes = self.args.get('include_file', [])
                filedata = pack_include_files(includes)
                unpack_include_files(filedata, test_dir)

            # change to execution directory if asked
            logger.debug('chdir %r' % test_dir)
            os.chdir(test_dir)
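For context, _prepare_filesystem only touches the filesystem when `test_dir` is configured, and only builds and unpacks the include files in standalone (non-slave) mode. A hypothetical standalone configuration that exercises it could look like the following; the key names come straight from the `args` lookups above, while the values are made up:

# Hypothetical args for a standalone run: _prepare_filesystem would create
# "/tmp/loads-<pid>", unpack any files matching the include patterns into it,
# and then chdir into it before the run starts.
args = {
    'test_dir': '/tmp/loads',
    'include_file': ['tests/*.txt', 'extra-data'],
}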
Example #6
class Client(object):
    """Class to drive a Loads cluster.

    Options:

    - **frontend**: ZMQ socket to call.
    - **timeout**: maximum allowed time for a job to run.
      Defaults to 1s.
    - **timeout_max_overflow**: maximum timeout overflow allowed.
      Defaults to 1.5s.
    - **timeout_overflows**: number of times in a row the timeout value
      can be overflowed per agent. The client keeps a counter of
      executions that were longer than the regular timeout but shorter
      than **timeout_max_overflow**. When the number goes over
      **timeout_overflows**, the usual TimeoutError is raised.
      When an agent returns on time, the counter is reset.
    - **ssh**: ssh tunnel server.
    """
    def __init__(self,
                 frontend=DEFAULT_FRONTEND,
                 timeout=DEFAULT_TIMEOUT,
                 timeout_max_overflow=DEFAULT_TIMEOUT_MOVF,
                 timeout_overflows=DEFAULT_TIMEOUT_OVF,
                 debug=False,
                 ctx=None,
                 ssh=None):
        self.ssh = ssh
        self.kill_ctx = ctx is None
        self.ctx = ctx or zmq.Context()
        self.frontend = frontend
        self.master = self.ctx.socket(zmq.REQ)
        if ssh:
            from zmq import ssh as zmq_ssh
            zmq_ssh.tunnel_connection(self.master, frontend, self.ssh)
        else:
            self.master.connect(frontend)

        self.poller = zmq.Poller()
        self.poller.register(self.master, zmq.POLLIN)
        self.timeout = timeout * 1000
        self.lock = threading.Lock()
        self.timeout_max_overflow = timeout_max_overflow * 1000
        self.timeout_overflows = timeout_overflows
        self.debug = debug

    def execute(self, job, timeout=None, log_exceptions=True):
        """Runs the job

        Options:

        - **job**: Job to be performed. Can be a :class:`Message`
          instance or a mapping; a mapping will be turned into a
          :class:`Message` automatically.
        - **timeout**: maximum allowed time for a job to run.
          If not provided, uses the one defined in the constructor.

        If the job does not complete before the timeout, a
        :class:`TimeoutError` is raised.

        This method is thread-safe and uses a lock. If you need to execute a
        lot of jobs simultaneously on a broker, use the :class:`Pool` class.

        """
        if timeout is None:
            timeout = self.timeout_max_overflow

        try:
            duration, res = timed(self.debug)(self._execute)(job, timeout)
        except Exception:
            # logged, connector replaced.
            if log_exceptions:
                logger.exception('Failed to execute the job.')
            raise

        if 'error' in res:
            raise ValueError(res['error'])

        return res['result']

    def close(self):
        self.master.setsockopt(zmq.LINGER, 0)
        self.master.close()

        if self.kill_ctx:
            self.ctx.destroy(0)

    def _execute(self, job, timeout=None):

        if not isinstance(job, Message):
            job = Message(**job)

        if timeout is None:
            timeout = self.timeout_max_overflow

        with self.lock:
            send(self.master, job.serialize())

            while True:
                try:
                    socks = dict(self.poller.poll(timeout))
                    break
                except zmq.ZMQError as e:
                    if e.errno != errno.EINTR:
                        raise

        if socks.get(self.master) == zmq.POLLIN:
            data = recv(self.master)
            return json.loads(data)

        raise TimeoutError(timeout)

    def run(self, args, async=True):
        # let's ask the broker how many agents it has
        res = self.execute({'command': 'LIST'})

        # do we have enough?
        agents = len(res)
        agents_needed = args.get('agents', 1)
        if agents < agents_needed:
            msg = 'Not enough agents running on that broker. '
            msg += 'Asked: %d, Got: %d' % (agents_needed, agents)

            raise ExecutionError(msg)

        # let's copy over some files if we need to
        includes = args.get('include_file', [])

        cmd = {
            'command': 'CTRL_RUN',
            'async': async,
            'agents': agents_needed,
            'args': args
        }

        cmd['filedata'] = pack_include_files(includes)
        res = self.execute(cmd)
        logger.debug('Run on its way')
        logger.debug(res)
        return res
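A hypothetical driver for the Client above, assuming a broker is reachable on some frontend endpoint; the address and the run arguments below are illustrative, not taken from the source:

# Illustrative usage of the Client class defined above.
client = Client(frontend='tcp://127.0.0.1:5995', timeout=5)
try:
    # Same command Client.run() uses internally to count available agents.
    agents = client.execute({'command': 'LIST'})
    print('%d agents available' % len(agents))

    # Start a run on two agents, shipping local *.txt files along with it.
    res = client.run({'agents': 2, 'include_file': ['*.txt']})
    print(res)
finally:
    client.close()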