Example #1
def test_communicate_with_poll():
    # This test was being skipped since git 25812fca8; I don't think there's
    # a need to do that anymore. The original comment:
    #
    # https://github.com/eventlet/eventlet/pull/24
    # `eventlet.green.subprocess.Popen.communicate()` was broken
    # in Python 2.7 because the usage of the `select` module was moved from
    # `_communicate` into two other methods `_communicate_with_select`
    # and `_communicate_with_poll`. Link to 2.7's implementation:
    # http://hg.python.org/cpython/file/2145593d108d/Lib/subprocess.py#l1255

    p = subprocess.Popen(
        [sys.executable, '-c', 'import time; time.sleep(0.5)'],
        stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    t1 = time.time()
    eventlet.with_timeout(0.1, p.communicate, timeout_value=True)
    tdiff = time.time() - t1
    assert 0.1 <= tdiff <= 0.2, 'did not stop within allowed time'
Example #2
    def lldp_neighbours_list(self):
        lldp_neighbours = []
        p = subprocess.Popen(["lldpcli", "-f", "json", "show", "neighbor",
                              "detail"], stdout=subprocess.PIPE)
        data = json.loads(p.communicate()[0])

        lldp = data['lldp'][0]

        if not lldp.get('interface'):
            return lldp_neighbours

        for iface in lldp['interface']:
            neighbour_attrs = self._lldpd_get_attrs(iface)
            neighbour = plugin.Neighbour(**neighbour_attrs)
            lldp_neighbours.append(neighbour)

        return lldp_neighbours
Example #3
    def _run_restore(self):
        with self.restore_stream as stream:
            self.process = subprocess.Popen(self.restore_cmd,
                                            shell=True,
                                            stdin=subprocess.PIPE,
                                            stderr=subprocess.PIPE)
            self.pid = self.process.pid
            content_length = 0
            chunk = stream.read(CHUNK_SIZE)
            while chunk:
                self.process.stdin.write(chunk)
                content_length += len(chunk)
                chunk = stream.read(CHUNK_SIZE)
            self.process.stdin.close()
            LOG.info("Restored %s bytes from swift via xbstream." %
                     content_length)

        return content_length
Example #4
    def _rsync(self, args):
        """
        Execute the rsync binary to replicate a partition.

        :returns: return code of rsync process. 0 is successful
        """
        start_time = time.time()
        ret_val = None
        try:
            with Timeout(self.rsync_timeout):
                proc = subprocess.Popen(args,
                                        stdout=subprocess.PIPE,
                                        stderr=subprocess.STDOUT)
                results = proc.stdout.read()
                ret_val = proc.wait()
        except Timeout:
            self.logger.error(_("Killing long-running rsync: %s"), str(args))
            proc.kill()
            return 1  # failure response code
        total_time = time.time() - start_time
        for result in results.split('\n'):
            if result == '':
                continue
            if result.startswith('cd+'):
                continue
            if not ret_val:
                self.logger.info(result)
            else:
                self.logger.error(result)
        if ret_val:
            error_line = _('Bad rsync return code: %(ret)d <- %(args)s') % \
                {'args': str(args), 'ret': ret_val}
            if self.rsync_error_log_line_length:
                error_line = error_line[:self.rsync_error_log_line_length]
            self.logger.error(error_line)
        else:
            log_method = self.logger.info if results else self.logger.debug
            log_method(
                _("Successful rsync of %(src)s at %(dst)s (%(time).03f)"), {
                    'src': args[-2],
                    'dst': args[-1],
                    'time': total_time
                })
        return ret_val
Example #5
def delete_helm_release(release):
    """Delete helm release

    This method deletes a helm release without --purge, which removes
    all associated resources from kubernetes but not from the store (ETCD).

    In the scenario of updating an application, this method is needed to
    clean up releases that were deployed by the old application but are
    not present in the new one.

    :param release: the name of the helm release
    """
    # NOTE: This mechanism deletes armada/tiller managed releases.
    # This could be adapted to also delete helm v3 releases using
    # 'helm uninstall'.
    env = os.environ.copy()
    env['PATH'] = '/usr/local/sbin:' + env['PATH']
    env['KUBECONFIG'] = kubernetes.KUBERNETES_ADMIN_CONF
    helm_cmd = subprocess.Popen(
        ['helmv2-cli', '--',
         'helm', 'delete', release, '--tiller-connection-timeout', '5'],
        env=env, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    timer = threading.Timer(20, kill_process_and_descendants, [helm_cmd])

    try:
        timer.start()
        out, err = helm_cmd.communicate()
        if err and not out:
            if ("deletion completed" or "not found" or "is already deleted") in err:
                LOG.debug("Release %s not found or deleted already" % release)
                return True
            raise exception.HelmTillerFailure(
                reason="Failed to delete release: %s" % err)
        elif not err and not out:
            err_msg = "Failed to delete release. " \
                      "Helm tiller response timeout."
            raise exception.HelmTillerFailure(reason=err_msg)
        return True
    except Exception as e:
        LOG.error("Failed to delete release: %s" % e)
        raise exception.HelmTillerFailure(
            reason="Failed to delete release: %s" % e)
    finally:
        timer.cancel()
Example #6
def run_command(cmd, stdin=None, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=False,
                cwd=None, env=None):
    """
    Run the provided command in a subprocess and wait until it completes.

    :param cmd: Command to run.
    :type cmd: ``str`` or ``list``

    :param stdin: Process stdin.
    :type stdin: ``object``

    :param stdout: Process stdout.
    :type stdout: ``object``

    :param stderr: Process stderr.
    :type stderr: ``object``

    :param shell: True to use a shell.
    :type shell ``boolean``

    :param cwd: Optional working directory.
    :type cwd: ``str``

    :param env: Optional environment to use with the command. If not provided,
                environment from the current process is inherited.
    :type env: ``dict``

    :rtype: ``tuple`` (exit_code, stdout, stderr)
    """
    assert isinstance(cmd, (list, tuple) + six.string_types)

    if not env:
        env = os.environ.copy()

    process = subprocess.Popen(cmd, stdin=stdin, stdout=stdout, stderr=stderr,
                               env=env, cwd=cwd, shell=shell)
    stdout, stderr = process.communicate()
    exit_code = process.returncode

    if six.PY3:
        return (exit_code, stdout.decode(), stderr.decode())
    else:
        return (exit_code, stdout, stderr)
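A minimal usage sketch for run_command() above; the command and the error handling are illustrative, not part of the original module:

exit_code, out, err = run_command(['ls', '-l', '/tmp'])
if exit_code != 0:
    raise RuntimeError('command failed with %s: %s' % (exit_code, err))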
Example #7
def retrieve_helm_v3_releases():
    helm_list = subprocess.Popen([
        'helm', '--kubeconfig', kubernetes.KUBERNETES_ADMIN_CONF, 'list',
        '--all-namespaces', '--output', 'yaml'
    ],
                                 stdout=subprocess.PIPE,
                                 stderr=subprocess.PIPE)
    timer = threading.Timer(20, kill_process_and_descendants, [helm_list])

    try:
        timer.start()
        out, err = helm_list.communicate()
        if helm_list.returncode != 0:
            if err:
                raise exception.HelmTillerFailure(reason=err)

            # Killing the subprocess with kill() when the timer expires
            # returns EBADF because the pipe is closed, but leaves no error
            # string on stderr.
            if helm_list.returncode == -9:
                raise exception.HelmTillerFailure(
                    reason="helm list operation timed out after "
                    "20 seconds. Terminated by threading timer.")
            raise exception.HelmTillerFailure(
                reason="helm list operation failed without error "
                "message, errno=%s" % helm_list.returncode)

        deployed_releases = {}
        if out:
            releases = yaml.safe_load(out)
            for r in releases:
                r_name = r.get('name')
                r_version = r.get('revision')
                r_namespace = r.get('namespace')

                deployed_releases.setdefault(r_name, {}).update(
                    {r_namespace: r_version})

        return deployed_releases
    except Exception as e:
        raise exception.HelmTillerFailure(
            reason="Failed to retrieve helmv3 releases: %s" % e)
    finally:
        timer.cancel()
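The timeout handling above hinges on a threading.Timer that kills the process tree when helm hangs. A self-contained sketch of the same pattern, using a plain sleep command and Popen.kill in place of kill_process_and_descendants:

import subprocess
import threading

proc = subprocess.Popen(['sleep', '60'],
                        stdout=subprocess.PIPE, stderr=subprocess.PIPE)
# Popen.kill stands in here for the kill_process_and_descendants helper.
timer = threading.Timer(5, proc.kill)
try:
    timer.start()
    out, err = proc.communicate()
    # A returncode of -9 means the timer delivered SIGKILL; stderr is
    # typically empty in that case, as the comment above describes.
    if proc.returncode == -9:
        raise RuntimeError('command timed out after 5 seconds')
finally:
    timer.cancel()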
Example #8
 def test_st2auth(self):
     port = random.randint(10000, 30000)
     cmd = (
         'gunicorn st2auth.wsgi:application -k eventlet -b "127.0.0.1:%s" --workers 1'
         % port)
     env = os.environ.copy()
     env["ST2_CONFIG_PATH"] = ST2_CONFIG_PATH
     process = subprocess.Popen(cmd,
                                env=env,
                                shell=True,
                                preexec_fn=os.setsid)
     try:
         self.add_process(process=process)
         eventlet.sleep(8)
         self.assertProcessIsRunning(process=process)
         response = requests.post("http://127.0.0.1:%s/tokens" % (port))
         self.assertEqual(response.status_code, http_client.UNAUTHORIZED)
     finally:
         kill_process(process)
Example #9
def retrieve_helm_releases():
    """Retrieve the deployed helm releases from tiller

    Get the name, namespace and version for the deployed releases
    by querying helm tiller
    :return: a dict of deployed helm releases
    """
    helm_list = subprocess.Popen(
        ['helm', '--kubeconfig', '/etc/kubernetes/admin.conf',
         'list', '--output', 'yaml'],
        stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    timer = threading.Timer(20, helm_list.kill)

    try:
        releases = {}
        deployed_releases = {}

        timer.start()
        out, err = helm_list.communicate()
        if out and not err:
            output = yaml.safe_load(out)
            releases = output.get('Releases', None)
        elif err and not out:
            raise exception.HelmTillerFailure(
                reason="Failed to retrieve releases: %s" % err)
        elif not err and not out:
            err_msg = "Failed to retrieve releases. " \
                      "Helm tiller response timeout."
            raise exception.HelmTillerFailure(reason=err_msg)

        for r in releases:
            r_name = r.get('Name')
            r_version = r.get('Revision')
            r_namespace = r.get('Namespace')

            deployed_releases.setdefault(r_name, {}).update(
                {r_namespace: r_version})
    except Exception as e:
        raise exception.HelmTillerFailure(
            reason="Failed to retrieve releases: %s" % e)
    finally:
        timer.cancel()
    return deployed_releases
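The returned mapping groups revisions by release name and namespace. An illustrative shape (release names and revision numbers are made up):

deployed_releases = retrieve_helm_releases()
# e.g. {'osh-openstack-keystone': {'openstack': 3},
#       'osh-openstack-glance': {'openstack': 1}}
revision = deployed_releases['osh-openstack-keystone']['openstack']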
Example #10
def cms_verify(formatted, signing_cert_file_name, ca_file_name):
    """
        verifies the signature of the contents IAW CMS syntax
    """
    _ensure_subprocess()
    process = subprocess.Popen([
        "openssl", "cms", "-verify", "-certfile", signing_cert_file_name,
        "-CAfile", ca_file_name, "-inform", "PEM", "-nosmimecap", "-nodetach",
        "-nocerts", "-noattr"
    ],
                               stdin=subprocess.PIPE,
                               stdout=subprocess.PIPE,
                               stderr=subprocess.PIPE)
    output, err = process.communicate(formatted)
    retcode = process.poll()
    if retcode:
        LOG.error(_('Verify error: %s') % err)
        raise subprocess.CalledProcessError(retcode, "openssl", output=err)
    return output
Example #11
def test_subprocess_wait():
    # https://bitbucket.org/eventlet/eventlet/issue/89
    # In Python 3.3 subprocess.Popen.wait() method acquired `timeout`
    # argument.
    # RHEL backported it to their Python 2.6 package.
    cmd = [sys.executable, "-c", "import time; time.sleep(0.5)"]
    p = subprocess.Popen(cmd)
    ok = False
    t1 = time.time()
    try:
        p.wait(timeout=0.1)
    except subprocess.TimeoutExpired as e:
        str(e)  # make sure it doesn't throw
        assert e.cmd == cmd
        assert e.timeout == 0.1
        ok = True
    tdiff = time.time() - t1
    assert ok, 'did not raise subprocess.TimeoutExpired'
    assert 0.1 <= tdiff <= 0.2, 'did not stop within allowed time'
Example #12
def salint():
    "Spamassassin lint"
    logger = salint.get_logger()
    logger.info(_("Running Spamassassin lint checks"))
    lint = []
    saprefs = config.get('ms.saprefs',
                         '/etc/MailScanner/spam.assassin.prefs.conf')

    pipe1 = subprocess.Popen(
        ['spamassassin', '-x', '-D', '-p', saprefs, '--lint'],
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE)
    while True:
        line = pipe1.stderr.readline()
        if not line:
            break
        lint.append(line)
    pipe1.wait(timeout=2)
    return lint
Example #13
def is_selection_valid(size: int) -> bool:
    """
    Checks if there exists a valid solution for the currently set queens, using MiniZinc and the CP solver Gecode.

    First, it creates a MiniZinc data file that specifies the board size N and the currently selected queen
    positions. Then MiniZinc is run with the data file, and if the output does not contain "UNSATISFIABLE", a
    solution was found.
    :param size: the size of the board
    :return: True if a solution exists, and False otherwise.
    """
    path = os.path.dirname(os.path.realpath(__file__)) + "/minizinc/"
    create_data_file(size, path)
    with subprocess.Popen(["minizinc", "--solver", "gecode", path + "queens.mzn", "--data", path + "data.dzn"],
                          stdout=subprocess.PIPE, stderr=subprocess.STDOUT, bufsize=1,
                          universal_newlines=True) as p:
        for line in p.stdout:
            if "UNSATISFIABLE" in line:
                return False
    return True
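The docstring above refers to a create_data_file() helper that is not shown. A plausible sketch of such a helper, with the .dzn layout and the selected parameter being assumptions rather than the project's actual format:

def create_data_file(size, path, selected=()):
    # Hypothetical writer for the MiniZinc data file used above: n is the
    # board size, selected holds 1-based (row, column) pairs of queens
    # that are already placed on the board.
    with open(path + "data.dzn", "w") as data_file:
        data_file.write("n = %d;\n" % size)
        rows = " | ".join("%d, %d" % pair for pair in selected)
        data_file.write("fixed = [| %s |];\n" % rows)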
Example #14
def launch(*cmd, **kwargs):
    shell = kwargs.pop('shell', False)
    run_as_root = kwargs.pop('run_as_root', False)
    if run_as_root and 'root_helper' not in kwargs:
        kwargs['root_helper'] = _get_root_helper()
    root_helper = kwargs.get('root_helper')
    if run_as_root:
        if shell:
            # the root helper has to be injected into the command string
            cmd = [' '.join((root_helper, cmd[0]))] + list(cmd[1:])
        else:
            # the root helper has to be tokenized into an argument list
            cmd = shlex.split(root_helper) + list(cmd)
    try:
        subprocess.Popen(cmd, shell=shell)
    except OSError as err:
        f = _('Got an OSError\ncommand: %(cmd)r\n' 'errno: %(errno)r')
        LOG.error(f, {'cmd': ' '.join(cmd), 'errno': err.errno})
    finally:
        time.sleep(0)
Example #15
 def connect(self, callback):
     self._datacallback = callback
     master, slave = pty.openpty()
     self._master = master
     try:
         self.subproc = subprocess.Popen([self.executable],
                                         env=self.subenv,
                                         stdin=slave,
                                         stdout=slave,
                                         stderr=subprocess.PIPE,
                                         close_fds=True)
     except OSError:
         print "Unable to execute " + self.executable + " (permissions?)"
         self.close()
         return
     os.close(slave)
     fcntl.fcntl(master, fcntl.F_SETFL, os.O_NONBLOCK)
     fcntl.fcntl(self.subproc.stderr.fileno(), fcntl.F_SETFL, os.O_NONBLOCK)
     self.readerthread = eventlet.spawn(self.relaydata)
Example #16
 def test_st2api_wsgi_entry_point(self):
     port = random.randint(10000, 30000)
     cmd = (
         'gunicorn st2api.wsgi:application -k eventlet -b "127.0.0.1:%s" --workers 1'
         % port)
     env = os.environ.copy()
     env['ST2_CONFIG_PATH'] = ST2_CONFIG_PATH
     process = subprocess.Popen(cmd,
                                env=env,
                                shell=True,
                                preexec_fn=os.setsid)
     try:
         self.add_process(process=process)
         eventlet.sleep(8)
         self.assertProcessIsRunning(process=process)
         response = requests.get('http://127.0.0.1:%s/v1/actions' % (port))
         self.assertEqual(response.status_code, http_client.OK)
     finally:
         kill_process(process)
Example #17
def create_process(cmd, run_as_root=False, addl_env=None):
    """Create a process object for the given command.

    The return value will be a tuple of the process object and the
    list of command arguments used to create it.
    """
    cmd = list(map(str, addl_env_args(addl_env) + cmd))
    if run_as_root:
        # NOTE(ralonsoh): to be removed once the migration to privsep
        # execution is done.
        cmd = shlex.split(config.get_root_helper(cfg.CONF)) + cmd
    LOG.debug("Running command: %s", cmd)
    obj = subprocess.Popen(cmd,
                           shell=False,
                           stdin=subprocess.PIPE,
                           stdout=subprocess.PIPE,
                           stderr=subprocess.PIPE)

    return obj, cmd
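A hedged usage sketch for create_process(); the command is illustrative and error handling is kept minimal:

obj, cmd = create_process(['ip', '-o', 'link', 'show'])
out, err = obj.communicate()
if obj.returncode != 0:
    LOG.error("Command %s failed: %s", cmd, err)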
Example #18
    def _rsync_file(self,
                    db_file,
                    remote_file,
                    whole_file=True,
                    different_region=False):
        """
        Sync a single file using rsync. Used by _rsync_db to handle syncing.

        :param db_file: file to be synced
        :param remote_file: remote location to sync the DB file to
        :param whole_file: if True, uses rsync's --whole-file flag
        :param different_region: if True, the destination node is in a
                                 different region

        :returns: True if the sync was successful, False otherwise
        """
        popen_args = [
            'rsync', '--quiet', '--no-motd',
            '--timeout=%s' % int(math.ceil(self.node_timeout)),
            '--contimeout=%s' % int(math.ceil(self.conn_timeout))
        ]
        if self.rsync_password:
            passwd_opt = '--password-file=' + self.rsync_password_file
            popen_args.append(passwd_opt)
        if whole_file:
            popen_args.append('--whole-file')

        if self.rsync_compress and different_region:
            # Allow for compression, but only if the remote node is in
            # a different region than the local one.
            popen_args.append('--compress')

        popen_args.extend([db_file, remote_file])
        proc = subprocess.Popen(popen_args)
        proc.communicate()
        if proc.returncode != 0:
            self.logger.error(_('ERROR rsync failed with %(code)s: %(args)s'),
                              {
                                  'code': proc.returncode,
                                  'args': popen_args
                              })
        return proc.returncode == 0
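For orientation, with node_timeout=10, conn_timeout=5, no rsync password and whole_file=True, the argument list assembled above would come out roughly as follows (the file paths are illustrative):

popen_args = [
    'rsync', '--quiet', '--no-motd',
    '--timeout=10', '--contimeout=5',
    '--whole-file',
    '/srv/node/sda/containers/123.db',        # db_file (example)
    'replica.example.com::container/123.db',  # remote_file (example)
]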
Example #19
    def test_st2auth(self):
        port = random.randint(10000, 30000)
        config_path = os.path.join(
            BASE_DIR, '../../../st2auth/st2auth/gunicorn_config.py')
        st2_config_path = os.path.join(BASE_DIR, '../../../conf/st2.dev.conf')
        cmd = ('gunicorn_pecan %s -k eventlet -w 1 -b 127.0.0.1:%s' %
               (config_path, port))
        env = os.environ.copy()
        env['ST2_CONFIG_PATH'] = st2_config_path
        process = subprocess.Popen(cmd,
                                   stdout=subprocess.PIPE,
                                   stderr=subprocess.PIPE,
                                   env=env,
                                   shell=True,
                                   preexec_fn=os.setsid)

        eventlet.sleep(5)
        response = requests.post('http://127.0.0.1:%s/tokens' % (port))
        self.assertEqual(response.status_code, httplib.UNAUTHORIZED)
        kill_process(process)
Example #20
def test_communicate_with_poll():
    # https://github.com/eventlet/eventlet/pull/24
    # `eventlet.green.subprocess.Popen.communicate()` was broken
    # in Python 2.7 because the usage of the `select` module was moved from
    # `_communicate` into two other methods `_communicate_with_select`
    # and `_communicate_with_poll`. Link to 2.7's implementation:
    # http://hg.python.org/cpython/file/2145593d108d/Lib/subprocess.py#l1255
    if getattr(original_subprocess.Popen, '_communicate_with_poll',
               None) is None:
        raise SkipTest(
            'original subprocess.Popen does not have _communicate_with_poll')

    p = subprocess.Popen(
        [sys.executable, '-c', 'import time; time.sleep(0.5)'],
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE)
    t1 = time.time()
    eventlet.with_timeout(0.1, p.communicate, timeout_value=True)
    tdiff = time.time() - t1
    assert 0.1 <= tdiff <= 0.2, 'did not stop within allowed time'
Example #21
 def test_st2api_wsgi_entry_point(self):
     port = random.randint(10000, 30000)
     config_path = os.path.join(
         BASE_DIR, '../../../st2api/st2api/gunicorn_config.py')
     cmd = ('gunicorn_pecan %s -k eventlet -w 1 -b 127.0.0.1:%s' %
            (config_path, port))
     env = os.environ.copy()
     env['ST2_CONFIG_PATH'] = ST2_CONFIG_PATH
     process = subprocess.Popen(cmd,
                                stdout=subprocess.PIPE,
                                stderr=subprocess.PIPE,
                                env=env,
                                shell=True,
                                preexec_fn=os.setsid)
     self.add_process(process=process)
     eventlet.sleep(5)
     self.assertProcessIsRunning(process=process)
     response = requests.get('http://127.0.0.1:%s/v1/actions' % (port))
     self.assertEqual(response.status_code, httplib.OK)
     kill_process(process)
Example #22
def cms_sign_text(text, signing_cert_file_name, signing_key_file_name):
    """ Uses OpenSSL to sign a document
    Produces a Base64 encoding of a DER formatted CMS Document
    http://en.wikipedia.org/wiki/Cryptographic_Message_Syntax
    """
    _ensure_subprocess()
    process = subprocess.Popen([
        "openssl", "cms", "-sign", "-signer", signing_cert_file_name, "-inkey",
        signing_key_file_name, "-outform", "PEM", "-nosmimecap", "-nodetach",
        "-nocerts", "-noattr"
    ],
                               stdin=subprocess.PIPE,
                               stdout=subprocess.PIPE,
                               stderr=subprocess.PIPE)
    output, err = process.communicate(text)
    retcode = process.poll()
    if retcode or "Error" in err:
        LOG.error('Signing error: %s' % err)
        raise subprocess.CalledProcessError(retcode, "openssl")
    return output
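A sketch tying this signer to the cms_verify() helper shown earlier; the file paths and the token_data variable are placeholders:

signed = cms_sign_text(token_data,
                       '/etc/keystone/ssl/certs/signing_cert.pem',
                       '/etc/keystone/ssl/private/signing_key.pem')
# With -nodetach the verified output is the original content.
verified = cms_verify(signed,
                      '/etc/keystone/ssl/certs/signing_cert.pem',
                      '/etc/keystone/ssl/certs/ca.pem')
assert verified == token_data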
Example #23
def execute_command(cmdstring,
                    cwd=None,
                    preexec_fn=None,
                    env=None,
                    shell=False):
    if shell:
        cmdstring_list = cmdstring
    else:
        cmdstring_list = shlex.split(cmdstring)

    sub = subprocess.Popen(cmdstring_list,
                           cwd=cwd,
                           preexec_fn=preexec_fn,
                           env=env,
                           stdin=subprocess.PIPE,
                           stdout=subprocess.PIPE,
                           shell=shell,
                           bufsize=4096)
    output, err = sub.communicate()
    return int(sub.returncode), output, err
Example #24
def subprocess_popen(args,
                     stdin=None,
                     stdout=None,
                     stderr=None,
                     shell=False,
                     env=None,
                     preexec_fn=_subprocess_setup,
                     close_fds=True):

    # Set sensible FD limits - this is an adaption from oslo.rootwrap
    # See https://github.com/openstack/oslo.rootwrap/commit/c0a86998203315858721a7b2c8ab75fbf5cd51d9
    if not getattr(subprocess_popen, '_ccloud_fd_patch', False) and resource:
        # When use close_fds=True on Python 2.x, we spend significant time
        # in closing fds up to current soft ulimit, which could be large.
        # Lower our ulimit to a reasonable value to regain performance.
        fd_limits = resource.getrlimit(resource.RLIMIT_NOFILE)
        # sensible_fd_limit = min(common_config.rlimit_nofile, fd_limits[0])
        sensible_fd_limit = min(1024, fd_limits[0])
        if (fd_limits[0] > sensible_fd_limit):
            # Unfortunately this inherits to our children, so allow them to
            # re-raise by passing through the hard limit unmodified
            resource.setrlimit(resource.RLIMIT_NOFILE,
                               (sensible_fd_limit, fd_limits[1]))
            # This is set on import to the hard ulimit. if its defined we
            # already have imported it, so we need to update it to the new
            # limit.
            if (hasattr(subprocess, 'MAXFD')
                    and subprocess.MAXFD > sensible_fd_limit):
                subprocess.MAXFD = sensible_fd_limit
                subprocess_popen._ccloud_fd_patch = True
        else:
            subprocess_popen._ccloud_fd_patch = True

    return subprocess.Popen(args,
                            shell=shell,
                            stdin=stdin,
                            stdout=stdout,
                            stderr=stderr,
                            preexec_fn=preexec_fn,
                            close_fds=close_fds,
                            env=env)
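A small sketch of what the limit lowering above does to the calling process; the 1024 value mirrors the sensible_fd_limit chosen in the wrapper:

import resource
import subprocess

proc = subprocess_popen(['true'], stdout=subprocess.PIPE)
proc.communicate()
soft, hard = resource.getrlimit(resource.RLIMIT_NOFILE)
# After the first call the soft limit is capped at 1024; the hard limit
# is left untouched so child processes may raise it again.
assert soft <= 1024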
Example #25
def cms_verify(formatted, signing_cert_file_name, ca_file_name):
    """Verifies the signature of the contents IAW CMS syntax.

    :raises: subprocess.CalledProcessError
    :raises: CertificateConfigError if certificate is not configured properly.
    """
    _ensure_subprocess()
    process = subprocess.Popen([
        "openssl", "cms", "-verify", "-certfile", signing_cert_file_name,
        "-CAfile", ca_file_name, "-inform", "PEM", "-nosmimecap", "-nodetach",
        "-nocerts", "-noattr"
    ],
                               stdin=subprocess.PIPE,
                               stdout=subprocess.PIPE,
                               stderr=subprocess.PIPE,
                               universal_newlines=True)
    output, err, retcode = _process_communicate_handle_oserror(
        process, formatted, (signing_cert_file_name, ca_file_name))

    # Do not log errors, as some occur on the expected code path;
    # instead, catch them in the calling code and log them there.

    # When openssl is invoked with a file that does not exist, it returns
    # code 2 along with an error message. For more details see
    # http://www.openssl.org/docs/apps/cms.html#EXIT_CODES
    #
    # $ openssl cms -verify -certfile not_exist_file -CAfile \
    #       not_exist_file -inform PEM -nosmimecap -nodetach \
    #       -nocerts -noattr
    # Error opening certificate file not_exist_file
    #
    if retcode == 2:
        raise exceptions.CertificateConfigError(err)
    elif retcode:
        # NOTE(dmllr): Python 2.6 compatibility:
        # CalledProcessError did not have output keyword argument
        e = subprocess.CalledProcessError(retcode, "openssl")
        e.output = err
        raise e
    return output
Example #26
def bayesinfo():
    "Get bayes info"
    logger = bayesinfo.get_logger()
    logger.info(_("Generating Bayesian stats"))
    info = {}
    saprefs = config.get('ms.saprefs',
                         '/etc/MailScanner/spam.assassin.prefs.conf')

    pipe1 = subprocess.Popen(['sa-learn', '-p', saprefs, '--dump', 'magic'],
                             stdout=subprocess.PIPE,
                             stderr=subprocess.PIPE)
    while True:
        line = pipe1.stdout.readline()
        if not line:
            break
        match = BAYES_INFO_RE.match(line)
        if match:
            if match.group(5) == 'bayes db version':
                info['version'] = match.group(3)
            elif match.group(5) == 'nspam':
                info['spam'] = match.group(3)
            elif match.group(5) == 'nham':
                info['ham'] = match.group(3)
            elif match.group(5) == 'ntokens':
                info['tokens'] = match.group(3)
            elif match.group(5) == 'oldest atime':
                info['otoken'] = datetime.datetime\
                                .fromtimestamp(float(match.group(3)))
            elif match.group(5) == 'newest atime':
                info['ntoken'] = datetime.datetime\
                                .fromtimestamp(float(match.group(3)))
            elif match.group(5) == 'last journal sync atime':
                info['ljournal'] = datetime.datetime\
                                .fromtimestamp(float(match.group(3)))
            elif match.group(5) == 'last expiry atime':
                info['expiry'] = datetime.datetime\
                                .fromtimestamp(float(match.group(3)))
            elif match.group(5) == 'last expire reduction count':
                info['rcount'] = match.group(3)
    pipe1.wait(timeout=2)
    return info
Example #27
    def _execute_postgres_restore(self):
        # PostgreSQL outputs a few benign messages to the stderr stream
        # during a normal restore procedure.
        # We need to watch for those and avoid raising
        # an exception in response.
        # The message 'ERROR:  role "postgres" already exists'
        # is expected and does not pose any problem to the restore operation.

        stream = self.storage.load(self.location, self.checksum)
        process = subprocess.Popen(self.restore_cmd, shell=True,
                                   stdin=subprocess.PIPE,
                                   stderr=subprocess.PIPE)
        content_length = 0
        for chunk in stream:
            process.stdin.write(chunk)
            content_length += len(chunk)
        process.stdin.close()
        self._handle_errors(process)
        LOG.info(_("Restored %s bytes from stream.") % content_length)

        return content_length
Example #28
    def lldp_agents_list(self):
        lldp_agents = []

        p = subprocess.Popen(["lldpcli", "-f", "json", "show", "interface",
                              "detail"], stdout=subprocess.PIPE)
        data = json.loads(p.communicate()[0])

        lldp = data['lldp'][0]

        if not lldp.get('interface'):
            return lldp_agents

        for iface in lldp['interface']:
            agent_attrs = self._lldpd_get_attrs(iface)
            status = self._lldpd_get_agent_status()
            agent_attrs.update({"status": status})
            agent = plugin.Agent(**agent_attrs)
            lldp_agents.append(agent)

        return lldp_agents
Example #29
 def single(self, env, start_response):
     """Process a single a check call"""
     script = env['PATH_INFO'].strip('/')
     environ = os.environ.copy()
     environ.update(self.scripts[script]['env'])
     p = subprocess.Popen(
         "%s %s" %
         (self.scripts[script]['cmd'], self.scripts[script]['args']),
         shell=True,
         stdout=subprocess.PIPE,
         stderr=subprocess.PIPE,
         env=environ)
     stouterr = p.communicate()
     status = {
         '%s' % script: {
             'status': p.returncode,
             'out': stouterr[0].strip(),
             'err': stouterr[1].strip()
         }
     }
     start_response('200 OK', [('Content-Type', 'application/json')])
     return ['%s\r\n' % json.dumps(status)]
Example #30
 def mktest_family(self, i):
     family = list(self.geneFamilies[i])
     family.sort()
     familyid = repr(family).__hash__()
     ifname = '%s/clustalout/%s.fasta' % (run_name, familyid)
     ofname = '%s/mktest_out/%s.txt' % (run_name, familyid)
     if os.path.isfile(ofname):
         return
     try:
         in_num = self.n_values[str(familyid)]
     except KeyError as e:
         if os.path.isfile(ifname):
             print "Failed to mktest family %s (no n)." % str(familyid)
         return
     args = ('mktest', '-i', ifname, '-n', str(in_num))
     ofile = open(ofname, 'w')
     try:
         proc = gsub.Popen(args, stdout=ofile, stderr=ofile)
         proc.wait()
     except OSError:
         pass
     ofile.close()