Example #1
0
    def _check_output(self, cmd: List[str]) -> str:
        logger = logging.getLogger("pbspro.driver")

        logger.info("Running: %s", " ".join(cmd))

        try:
            ret = check_output(cmd, stderr=PIPE).decode()
            logger.info("Response: %s", ret)
            return ret
        except CalledProcessError as e:
            logger.debug(str(e))
            raise
Example #2
0
    def _log_response(self, s: ResponseStatus, r: Any) -> None:
        """Debug-log a truncated response body; at FINE level, also log it in full.

        No-op unless the root logger's effective level is DEBUG or lower.
        ``logging.FINE`` / ``logging.fine`` come from the project logging
        wrapper, not the stdlib logging module.
        """
        root_level = logging.getLogger().getEffectiveLevel()
        if root_level > logging.DEBUG:
            return

        import inspect

        # Identify the calling function for the log prefix.
        frames = inspect.getouterframes(inspect.currentframe(), 2)
        caller = "[{}]".format(frames[1].function)

        as_json = json.dumps(r.to_dict())

        logging.debug(
            "[%s] Response: Status=%s -> %s", caller, s.status_code, as_json[:100],
        )

        # Only emit the (potentially large) full payload at FINE verbosity.
        if root_level <= logging.FINE:
            logging.fine(
                "[%s] Full response: Status=%s -> %s", caller, s.status_code, as_json,
            )
Example #3
0
    def flush(self) -> None:
        """Emit each complete buffered line as its own INFO record, then reset the buffer."""
        buf = self.line_buffer.getvalue()
        if not buf:
            # Nothing buffered; avoid replacing the buffer needlessly.
            return
        fact = logginglib.getLogRecordFactory()
        logger = logging.getLogger(self.logger_name)
        created = None
        for line in buf.splitlines(keepends=False):
            # NOTE(review): the stdlib logging.LogRecord constructor does not
            # accept a "created" keyword — this call assumes a custom record
            # factory has been installed via setLogRecordFactory; confirm.
            record = fact(
                name="demandprinter",
                level=logging.INFO,
                pathname=__file__,
                lineno=1,
                msg=line,
                args=(),
                exc_info=None,
                created=created,
            )
            # Reuse the first record's timestamp so every line emitted by this
            # flush shares the same creation time.
            created = created or record.created
            logger.handle(record)

        self.line_buffer = io.StringIO()
    def flush(self) -> None:
        """Replay each buffered line through the root logger as an INFO record.

        If the root logger has no filters yet, an ExcludeDemandPrinterFilter
        is installed first — presumably to keep the replayed records from
        feeding back into this printer (confirm against the filter's
        implementation).  The line buffer is replaced afterwards.
        """
        pending = self.line_buffer.getvalue()
        if not pending:
            return

        make_record = logginglib.getLogRecordFactory()
        root_logger = logging.getLogger()

        if not root_logger.filters:
            root_logger.addFilter(ExcludeDemandPrinterFilter("root"))

        for text in pending.splitlines(keepends=False):
            root_logger.handle(
                make_record(
                    name="demandprinter",
                    level=logging.INFO,
                    pathname=__file__,
                    lineno=1,
                    msg=text,
                    args=(),
                    exc_info=None,
                )
            )

        self.line_buffer = io.StringIO()
Example #5
0
from cyclecloud.model.NodeCreationResultModule import NodeCreationResult
from cyclecloud.model.NodeListModule import NodeList
from cyclecloud.model.NodeManagementRequestModule import NodeManagementRequest
from cyclecloud.model.NodeManagementResultModule import NodeManagementResult
from cyclecloud.session import ResponseStatus
from requests.structures import CaseInsensitiveDict
from urllib3.exceptions import InsecureRequestWarning

import hpc.autoscale.hpclogging as logging
from hpc.autoscale import hpctypes as ht
from hpc.autoscale.ccbindings.interface import ClusterBindingInterface
from hpc.autoscale.codeanalysis import hpcwrap, hpcwrapclass
from hpc.autoscale.node.node import Node
from hpc.autoscale.util import partition

logger = logging.getLogger("cyclecloud.clustersapi")


class ReadOnlyModeException(RuntimeError):
    """Raised when a mutating call is attempted while the binding is in read-only mode."""

    pass


def notreadonly(method: Callable) -> Callable:
    """Decorator for methods that must not run when the object is read-only.

    The wrapper raises ReadOnlyModeException when the owning instance's
    ``read_only`` attribute is truthy; otherwise it delegates to *method*
    unchanged.  functools.wraps preserves the wrapped method's metadata
    (``__name__``, ``__doc__``), which the original wrapper discarded.
    """
    import functools

    @functools.wraps(method)
    def readonlywrapper(*args: Any, **kwargs: Any) -> Optional[Any]:
        # args[0] is `self` of the decorated bound method.
        if args[0].read_only:
            raise ReadOnlyModeException(
                "Can not call {} in read only mode.".format(method.__name__)
            )
        return method(*args, **kwargs)

    return readonlywrapper
Example #6
0
import os
import shlex
import subprocess
import sys
from abc import ABC, abstractmethod, abstractproperty
from shutil import which
from subprocess import STDOUT, CalledProcessError
from subprocess import check_call as _check_call
from subprocess import check_output as _check_output
from typing import Any, List, Optional

from hpc.autoscale import hpclogging as logging

_QCMD_LOGGER = logging.getLogger("gridengine.driver")
# Resolve the Grid Engine command-line tools once at import time; an empty
# string means the tool was not found on PATH.
_QCONF_PATH = which("qconf") or ""
_QMOD_PATH = which("qmod") or ""
_QSELECT_PATH = which("qselect") or ""
_QSTAT_PATH = which("qstat") or ""

__VALIDATED = False
if not __VALIDATED:
    # Report (once, at import) any qXXX binary missing from PATH.
    for key, value in list(globals().items()):
        if key.startswith("_Q") and key.endswith("_PATH"):
            if not value:
                # key looks like "_QCONF_PATH": split("_") yields
                # ["", "QCONF", "PATH"], so the executable name is element 1.
                # (The previous code took element 0, which is always "" here,
                # logging an empty executable name.)
                executable = key.split("_")[1].lower()
                logging.error("Could not find %s in PATH: %s", executable,
                              os.environ.get("PATH"))
__VALIDATED = True


def check_call(cmd: List[str], *args: Any, **kwargs: Any) -> None:
Example #7
0
def create_support_archive(config: Dict, archive: str) -> None:
    """
    Creates an archive with most logs and configurations required when requesting support.

    The archive is a gzipped tarball written to *archive*.  Credentials and
    cluster identifiers from *config* are blanked before inclusion.  The
    tarball is closed even if collection fails partway through, so a partial
    archive is still readable.
    """
    ge_env = environment.from_qconf(config)

    # tarfile.open with mode "w:gz" is the public, typed equivalent of the
    # internal TarFile.gzopen classmethod the original used (which needed a
    # "type: ignore").
    tf = tarfile.open(archive, "w:gz")

    def _add(cmd: List[str], name: str) -> None:
        # Store the output of a qconf invocation under the given entry name.
        contents = ge_env.qbin.qconf(cmd)
        _add_contents(contents, name)

    def _add_contents(contents: str, name: str) -> None:
        # TarInfo.size must be the *byte* length of the payload.  The original
        # used len(contents) on the str, which under-counts for non-ASCII text
        # and would corrupt the archive entry.
        payload = contents.encode()
        tarinfo = tarfile.TarInfo("gridengine-support/" + name)
        tarinfo.size = len(payload)
        tarinfo.mtime = int(time.time())
        tf.addfile(tarinfo, io.BytesIO(payload))

    try:
        # get our queue definitions
        for qname in ge_env.queues:
            _add(["-sq", qname], "queue_{}".format(qname))

        # get our parallel env definitions
        for pe_name in ge_env.pes:
            _add(["-sp", pe_name], "pe_{}".format(pe_name))

        # get a list of hostgroups. Actual definition of hgs is immaterial
        _add(["-shgrpl"], "hostgroups")
        # dump out the complexes
        _add(["-sc"], "complexes")

        # blank credentials / identifiers before archiving the config
        config_no_creds = dict(config)
        config_no_creds["password"] = ""
        config_no_creds["cluster_name"] = ""
        config_no_creds["username"] = ""
        config_no_creds["url"] = ""
        _add_contents(json.dumps(config_no_creds, indent=2), "autoscale.json")

        install_logs = os.path.join(os.getenv("SGE_ROOT", ""),
                                    os.getenv("SGE_CELL", ""),
                                    "common/install_logs")
        if os.path.exists(install_logs):
            for fil in os.listdir(install_logs):
                path = os.path.join(install_logs, fil)
                with open(path) as fr:
                    _add_contents(fr.read(), fil)

        # e.g. /sched/sge/sge-2011.11/default/spool/qmaster/messages
        spool_dir = os.path.join(os.getenv("SGE_ROOT", ""),
                                 os.getenv("SGE_CELL", ""), "spool")
        if os.path.exists(spool_dir):
            for hostname in os.listdir(spool_dir):
                messages_path = os.path.join(spool_dir, hostname, "messages")
                if os.path.exists(messages_path):
                    with open(messages_path) as fr:
                        _add_contents(fr.read(), "messages_{}".format(hostname))

        # may not exist on self-installs
        chef_client_log = "/opt/cycle/jetpack/logs/chef-client.log"
        if os.path.exists(chef_client_log):
            with open(chef_client_log) as fr:
                _add_contents(fr.read(), "chef-client.log")

        # find autoscale.log and autoscale.log.1-5 via any file-based handlers
        for handler in logging.getLogger().handlers:
            if hasattr(handler, "baseFilename"):
                base_filename = getattr(handler, "baseFilename")
                file_names = [base_filename] + [
                    base_filename + ".{}".format(n) for n in range(1, 6)
                ]
                for fname in file_names:
                    if os.path.exists(fname):
                        with open(fname) as fr:
                            _add_contents(fr.read(), os.path.basename(fname))
    finally:
        # close (and flush) the tarball even if any collection step raised;
        # the original leaked the open TarFile on error.
        tf.close()
    print("Wrote archive to", archive)