Example #1
def perform_krb181_workaround():
    cmdv = [
        configuration.get('kerberos', 'kinit_path'), "-c",
        configuration.get('kerberos', 'ccache'), "-R"
    ]  # Renew ticket_cache

    log.info("Renewing kerberos ticket to work around kerberos 1.8.1: " +
             " ".join(cmdv))

    ret = subprocess.call(cmdv, close_fds=True)

    if ret != 0:
        principal = "%s/%s" % (configuration.get(
            'kerberos', 'principal'), socket.getfqdn())
        fmt_dict = dict(princ=principal,
                        ccache=configuration.get('kerberos', 'ccache'))
        log.error(
            "Couldn't renew kerberos ticket in order to work around "
            "Kerberos 1.8.1 issue. Please check that the ticket for "
            "'%(princ)s' is still renewable:\n"
            "  $ kinit -f -c %(ccache)s\n"
            "If the 'renew until' date is the same as the 'valid starting' "
            "date, the ticket cannot be renewed. Please check your KDC "
            "configuration, and the ticket renewal policy (maxrenewlife) "
            "for the '%(princ)s' and `krbtgt' principals." % fmt_dict)
        sys.exit(ret)
Example #2
    def __init__(self, cluster_address=None):
        if cluster_address is None:
            cluster_address = configuration.get('dask', 'cluster_address')
        if not cluster_address:
            raise ValueError(
                'Please provide a Dask cluster address in airflow.cfg')
        self.cluster_address = cluster_address
        super(DaskExecutor, self).__init__(parallelism=0)
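A quick usage sketch follows; the address below is an illustrative assumption, and passing it explicitly bypasses the airflow.cfg lookup entirely.

# Minimal usage sketch; "127.0.0.1:8786" is an assumed address, not a value
# taken from the project's configuration.
executor = DaskExecutor(cluster_address="127.0.0.1:8786")
assert executor.cluster_address == "127.0.0.1:8786"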
Example #3
def run():
    if configuration.get('kerberos', 'keytab') is None:
        log.debug("Keytab renewer not starting, no keytab configured")
        sys.exit(0)

    while True:
        renew_from_kt()
        time.sleep(configuration.getint('kerberos', 'reinit_frequency'))
Example #4
def renew_from_kt():
    # The config is specified in seconds. But we ask for that same amount in
    # minutes to give ourselves a large renewal buffer.
    renewal_lifetime = "%sm" % configuration.getint('kerberos',
                                                    'reinit_frequency')
    principal = configuration.get('kerberos',
                                  'principal').replace("_HOST",
                                                       socket.getfqdn())

    cmdv = [
        configuration.get('kerberos', 'kinit_path'),
        "-r",
        renewal_lifetime,
        "-k",  # host ticket
        "-t",
        configuration.get('kerberos', 'keytab'),  # specify keytab
        "-c",
        configuration.get('kerberos', 'ccache'),  # specify credentials cache
        principal
    ]
    log.info("Reinitting kerberos from keytab: " + " ".join(cmdv))

    subp = subprocess.Popen(cmdv,
                            stdout=subprocess.PIPE,
                            stderr=subprocess.PIPE,
                            close_fds=True,
                            bufsize=-1,
                            universal_newlines=True)
    subp.wait()
    if subp.returncode != 0:
        log.error(
            "Couldn't reinit from keytab! `kinit' exited with %s.\n%s\n%s" %
            (subp.returncode, "\n".join(subp.stdout.readlines()),
             "\n".join(subp.stderr.readlines())))
        sys.exit(subp.returncode)

    global NEED_KRB181_WORKAROUND
    if NEED_KRB181_WORKAROUND is None:
        NEED_KRB181_WORKAROUND = detect_conf_var()
    if NEED_KRB181_WORKAROUND:
        # (From: HUE-640). The Kerberos clock has second-level granularity, so
        # make sure we renew the ticket only after the initial valid time.
        time.sleep(1.5)
        perform_krb181_workaround()
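Examples #1, #3 and #4 all read the same [kerberos] section of airflow.cfg. A minimal sketch of that section is shown below; only the key names come from the code above, every value is an illustrative assumption.

from configparser import ConfigParser

# Illustrative [kerberos] settings; the values are assumptions, not project defaults.
KERBEROS_CFG = """
[kerberos]
kinit_path = kinit
principal = airflow/_HOST
keytab = /etc/security/airflow.keytab
ccache = /tmp/airflow_krb5ccache
reinit_frequency = 3600
"""

cfg = ConfigParser()
cfg.read_string(KERBEROS_CFG)
assert cfg.getint('kerberos', 'reinit_frequency') == 3600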
Example #5
def detect_conf_var():
    """Return true if the ticket cache contains "conf" information as is found
    in ticket caches of Kerberos 1.8.1 or later. This is incompatible with the
    Sun Java Krb5LoginModule in Java6, so we need to take an action to work
    around it.
    """
    ticket_cache = configuration.get('kerberos', 'ccache')

    with open(ticket_cache, 'rb') as f:
        # Note: this file is binary, so we check for a bytes literal.
        return b'X-CACHECONF:' in f.read()
Example #6
File: __init__.py Project: P79N6A/xTool
def GetDefaultExecutor():
    """Creates a new instance of the configured executor if none exists and returns it"""
    global DEFAULT_EXECUTOR

    if DEFAULT_EXECUTOR is not None:
        return DEFAULT_EXECUTOR

    executor_name = configuration.get('core', 'EXECUTOR')

    DEFAULT_EXECUTOR = _get_executor(executor_name)

    log = LoggingMixin().log
    log.info("Using executor %s", executor_name)

    return DEFAULT_EXECUTOR
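_get_executor is referenced but not shown in this excerpt. A minimal sketch of what such a factory could look like is given below; the executor names and import paths are assumptions, not the project's actual mapping.

def _get_executor(executor_name):
    # Illustrative factory: map the configured executor name to an instance.
    # The module paths below are assumptions and may differ in xTool.
    if executor_name == 'CeleryExecutor':
        from xTool.executors.celery_executor import CeleryExecutor
        return CeleryExecutor()
    elif executor_name == 'DaskExecutor':
        from xTool.executors.dask_executor import DaskExecutor
        return DaskExecutor()
    raise ValueError("Unknown executor: %s" % executor_name)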
Example #7
    hooks = []
    executors = []
    macros = []
    admin_views = []
    flask_blueprints = []
    menu_links = []

    @classmethod
    def validate(cls):
        """验证插件必须定义name静态变量 ."""
        if not cls.name:
            raise XToolPluginException("Your plugin needs a name.")


# Get the plugins folder
plugins_folder = configuration.get('core', 'plugins_folder')
if not plugins_folder:
    plugins_folder = configuration.get('core', 'airflow_home') + '/plugins'
plugins_folder = os.path.expanduser(plugins_folder)

# Add the plugins folder to the system path
if plugins_folder not in sys.path:
    sys.path.append(plugins_folder)

plugins = []

norm_pattern = re.compile(r'[/|.]')

# Crawl through the plugins folder to find AirflowPlugin derivatives
for root, dirs, files in os.walk(plugins_folder, followlinks=True):
    for f in files:
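The excerpt is cut off inside the crawl loop. A minimal sketch of how such a crawl typically completes is shown below; XToolPlugin stands in for the plugin base class truncated out of the class excerpt above, and both that name and the loading logic are assumptions rather than the project's verbatim code.

import importlib.util
import inspect

for root, dirs, files in os.walk(plugins_folder, followlinks=True):
    for f in files:
        filepath = os.path.join(root, f)
        if not os.path.isfile(filepath) or not f.endswith('.py'):
            continue
        # Build a unique, importable module name from the file path.
        namespace = '_'.join([re.sub(norm_pattern, '__', root),
                              os.path.splitext(f)[0]])
        spec = importlib.util.spec_from_file_location(namespace, filepath)
        module = importlib.util.module_from_spec(spec)
        spec.loader.exec_module(module)
        # Collect every class deriving from the (hypothetical) XToolPlugin base.
        for obj in list(vars(module).values()):
            if inspect.isclass(obj) and issubclass(obj, XToolPlugin) \
                    and obj is not XToolPlugin:
                obj.validate()
                plugins.append(obj)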
Example #8
File: utils.py Project: P79N6A/xTool
def principal_from_username(username):
    realm = conf.get("security", "default_realm")
    if '@' not in username and realm:
        username = "******".format(username, realm)

    return username
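A quick illustration of the behaviour, assuming the [security] section sets default_realm to EXAMPLE.COM (an assumed value); an already-qualified username passes through unchanged.

# Assumes conf.get("security", "default_realm") returns "EXAMPLE.COM" (illustrative only).
assert principal_from_username("hive") == "hive@EXAMPLE.COM"
assert principal_from_username("hive@OTHER.ORG") == "hive@OTHER.ORG"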
Example #9
import subprocess
import time
import os

from celery import Celery
from celery import states as celery_states

from xTool.config_templates.default_celery import DEFAULT_CELERY_CONFIG
from xTool.exceptions import XToolException
from xTool.executors.base_executor import BaseExecutor
from xTool import configuration
from xTool.utils.log.logging_mixin import LoggingMixin
from xTool.utils.module_loading import import_string

PARALLELISM = configuration.get('core', 'PARALLELISM')
'''
To start the celery worker, run the command:
airflow worker
'''

# Load the Celery configuration, falling back to the bundled default
if configuration.has_option('celery', 'celery_config_options'):
    celery_configuration = import_string(
        configuration.get('celery', 'celery_config_options'))
else:
    celery_configuration = DEFAULT_CELERY_CONFIG

app = Celery(configuration.get('celery', 'CELERY_APP_NAME'),
             config_source=celery_configuration)
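For context, a minimal sketch of how a worker-side task could be registered on the app configured above; the task name and body are assumptions, not the project's verbatim code.

@app.task
def execute_command(command):
    # Illustrative task body: run a shell command and surface failures.
    log = LoggingMixin().log
    log.info("Executing command in Celery: %s", command)
    try:
        subprocess.check_call(command, shell=True, close_fds=True)
    except subprocess.CalledProcessError:
        log.exception("execute_command encountered a CalledProcessError")
        raise XToolException("Celery command failed")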