def __init__(self, api):
    """Bind this plugin to *api* and derive its metadata from the class.

    :param api: the API instance this plugin belongs to (must not be None).
    """
    assert api is not None
    self.__api = api
    # State for the one-shot, lock-protected finalization protocol.
    self.__finalize_called = False
    self.__finalized = False
    self.__finalize_lock = threading.RLock()
    # Identifying metadata is taken from the concrete subclass.
    cls = self.__class__
    self.name = cls.__name__
    self.module = cls.__module__
    self.fullname = '%s.%s' % (self.module, self.name)
    self.bases = tuple(
        '%s.%s' % (b.__module__, b.__name__) for b in cls.__bases__
    )
    # Translatable docstring; when the class has no docstring text the
    # summary falls back to '<module.Class>', otherwise the first
    # paragraph of the docstring is used.
    self.doc = _(cls.__doc__)
    if not self.doc.msg:
        self.summary = '<%s>' % self.fullname
    else:
        self.summary = unicode(self.doc).split('\n\n', 1)[0].strip()
    # Attach a logger to this instance (legacy ipa_log_manager API).
    log_mgr.get_logger(self, True)
    # Every plugin needs a LazyText label; supply a FixMe placeholder
    # when missing, and reject any non-LazyText value.
    if self.label is None:
        self.label = text.FixMe(self.name + '.label')
    if not isinstance(self.label, text.LazyText):
        raise TypeError(
            TYPE_ERROR % (
                self.fullname + '.label',
                text.LazyText,
                type(self.label),
                self.label
            )
        )
def __init__(self, api):
    """Bind this plugin to *api* and prepare finalization state and logging.

    :param api: the API instance this plugin belongs to (must not be None).
    """
    assert api is not None
    self.__api = api
    # One-shot finalization bookkeeping, guarded by a reentrant lock.
    self.__finalize_lock = threading.RLock()
    self.__finalize_called = False
    self.__finalized = False
    log_mgr.get_logger(self, True)
def __init__(self):
    '''
    Create the session manager.

    :returns: `SessionManager` object
    '''
    log_mgr.get_logger(self, True)
    # Auth-manager registry plus the set of session ids handed out so far.
    self.auth_mgr = SessionAuthManager()
    self.generated_session_ids = set()
def __init__(self, key, value, domain=None, path=None, max_age=None,
             expires=None, secure=None, httponly=None, timestamp=None):
    """Plain data container: record one cookie's attributes verbatim."""
    log_mgr.get_logger(self, True)
    self.key, self.value = key, value
    self.domain, self.path = domain, path
    self.max_age, self.expires = max_age, expires
    self.secure, self.httponly = secure, httponly
    self.timestamp = timestamp
def __init__(self, realm, subsystem, service_desc, host_name=None,
             nss_db=paths.PKI_TOMCAT_ALIAS_DIR, service_prefix=None):
    """Initializer

    :param realm: Kerberos realm name, passed to the base service class.
    :param subsystem: Dogtag subsystem name; lower-cased to build the
        base DN "o=ipa<subsystem>".
    :param service_desc: human-readable description of the service.
    :param host_name: FQDN of the host running this instance.
    :param nss_db: path of the NSS database directory.
    :param service_prefix: optional prefix for the service name.
    """
    super(DogtagInstance, self).__init__(
        'pki-tomcatd',
        service_desc=service_desc,
        realm_name=realm,
        service_user=constants.PKI_USER,
        service_prefix=service_prefix)
    self.admin_password = None
    self.fqdn = host_name
    self.pkcs12_info = None
    self.clone = False
    self.basedn = DN(('o', 'ipa%s' % subsystem.lower()))
    # NOTE(review): this literal looks like a redacted value — confirm
    # the real admin user name against the upstream source.
    self.admin_user = "******"
    self.admin_dn = DN(('uid', self.admin_user),
                       ('ou', 'people'), ('o', 'ipaca'))
    self.admin_groups = None
    self.tmp_agent_db = None
    self.subsystem = subsystem
    self.security_domain_name = "IPA"
    # replication parameters
    self.master_host = None
    self.master_replication_port = None
    self.subject_base = None
    self.nss_db = nss_db
    self.log = log_mgr.get_logger(self)
def __init__(self, realm, subsystem, service_desc, host_name=None,
             nss_db=paths.PKI_TOMCAT_ALIAS_DIR, service_prefix=None):
    """Initializer

    :param realm: Kerberos realm name, passed to the base service class.
    :param subsystem: Dogtag subsystem name; lower-cased to build the
        base DN "o=ipa<subsystem>".
    :param service_desc: human-readable description of the service.
    :param host_name: FQDN of the host running this instance.
    :param nss_db: path of the NSS database directory.
    :param service_prefix: optional prefix for the service name.
    """
    super(DogtagInstance, self).__init__(
        'pki-tomcatd',
        service_desc=service_desc,
        realm_name=realm,
        service_user=constants.PKI_USER,
        service_prefix=service_prefix
    )
    self.admin_password = None
    self.fqdn = host_name
    self.pkcs12_info = None
    self.clone = False
    self.basedn = DN(('o', 'ipa%s' % subsystem.lower()))
    # NOTE(review): this literal looks like a redacted value — confirm
    # the real admin user name against the upstream source.
    self.admin_user = "******"
    self.admin_dn = DN(('uid', self.admin_user),
                       ('ou', 'people'), ('o', 'ipaca'))
    self.admin_groups = None
    # Temporary agent NSS DB created eagerly under VAR_LIB_IPA;
    # NOTE(review): cleanup of this directory is not visible here.
    self.agent_db = tempfile.mkdtemp(prefix="tmp-", dir=paths.VAR_LIB_IPA)
    self.subsystem = subsystem
    self.security_domain_name = "IPA"
    # replication parameters
    self.master_host = None
    self.master_replication_port = None
    self.subject_base = None
    self.nss_db = nss_db
    self.log = log_mgr.get_logger(self)
def __init__(self, realm):
    """Configure the KRA subsystem instance for *realm*."""
    super(KRAInstance, self).__init__(
        realm=realm, subsystem="KRA", service_desc="KRA server")
    self.log = log_mgr.get_logger(self)
    # KRA data lives under o=kra,o=ipaca.
    self.basedn = DN(('o', 'kra'), ('o', 'ipaca'))
def __init__(self, argv, logger_name=None, log_stdout=True):
    """Record the command line and attach a logger.

    When *logger_name* is not given, the name defaults to
    "<module>.<ClassName>".  *log_stdout* is accepted but not used in
    this initializer.
    """
    self.returncode = None
    self.argv = argv
    self._done = False
    fallback = '%s.%s' % (self.__module__, type(self).__name__)
    self.logger_name = logger_name or fallback
    self.log = log_mgr.get_logger(self.logger_name)
def __init__(self, realm, dogtag_constants=None):
    """Create a KRA instance bound to *realm*.

    :param realm: Kerberos realm name.
    :param dogtag_constants: Dogtag version constants; when None the
        configured constants are used.
    """
    if dogtag_constants is None:
        dogtag_constants = dogtag.configured_constants()
    super(KRAInstance, self).__init__(
        realm=realm, subsystem="KRA", service_desc="KRA server",
        dogtag_constants=dogtag_constants)
    # KRA data lives under o=kra,o=ipaca.
    self.basedn = DN(('o', 'kra'), ('o', 'ipaca'))
    self.log = log_mgr.get_logger(self)
def __init__(self, realm, dogtag_constants=None):
    """Create a KRA instance bound to *realm*.

    :param realm: Kerberos realm name.
    :param dogtag_constants: Dogtag version constants; when None the
        configured constants are used.
    """
    if dogtag_constants is None:
        dogtag_constants = dogtag.configured_constants()
    super(KRAInstance, self).__init__(
        realm=realm,
        subsystem="KRA",
        service_desc="KRA server",
        dogtag_constants=dogtag_constants
    )
    # KRA data lives under o=kra,o=ipaca.
    self.basedn = DN(('o', 'kra'), ('o', 'ipaca'))
    self.log = log_mgr.get_logger(self)
def __init__(self, domain, hostname, role, index, ip=None,
             external_hostname=None):
    """Describe one test host: names, logger, IP address, SSH settings.

    :param domain: the test Domain object this host belongs to.
    :param hostname: hostname; its short part is re-rooted under the
        test domain's name to form ``self.hostname``.
    :param role: role of the host within the topology.
    :param index: index of the host within its role.
    :param ip: IP address; when omitted it is resolved from
        ``external_hostname`` (dig AAAA for IPv6, gethostbyname otherwise).
    :param external_hostname: externally reachable name; defaults to
        *hostname*.
    """
    self.domain = domain
    self.role = role
    self.index = index
    shortname, dot, ext_domain = hostname.partition('.')
    self.shortname = shortname
    # Internal name is always <short>.<test-domain>.
    self.hostname = shortname + '.' + self.domain.name
    self.external_hostname = external_hostname or hostname
    # NetBIOS name: first label of the domain, upper-cased.
    self.netbios = self.domain.name.split('.')[0].upper()
    self.logger_name = '%s.%s.%s' % (
        self.__module__, type(self).__name__, shortname)
    self.log = log_mgr.get_logger(self.logger_name)
    if ip:
        self.ip = ip
    else:
        if self.config.ipv6:
            # $(dig +short $M $rrtype|tail -1)
            stdout, stderr, returncode = ipautil.run(
                ['dig', '+short', self.external_hostname, 'AAAA'])
            self.ip = stdout.splitlines()[-1].strip()
        else:
            try:
                self.ip = socket.gethostbyname(self.external_hostname)
            except socket.gaierror:
                self.ip = None
        # Only reached when no explicit ip was given: resolution failure
        # is fatal.
        if not self.ip:
            raise RuntimeError('Could not determine IP address of %s'
                               % self.external_hostname)
    self.root_password = self.config.root_password
    self.root_ssh_key_filename = self.config.root_ssh_key_filename
    self.host_key = None
    self.ssh_port = 22
    self.env_sh_path = os.path.join(domain.config.test_dir, 'env.sh')
    self.log_collectors = []
def _start_pipe_thread(self, result_list, stream, name, do_log=True):
    """Start a thread that copies lines from ``stream`` to ``result_list``.

    If *do_log* is true, each line is also logged under ``name``.
    The new thread is registered in ``self.running_threads``.
    """
    stream_log = log_mgr.get_logger('%s.%s' % (self.logger_name, name))

    def _pump():
        # Drain the stream line by line until EOF.
        for raw in stream:
            if do_log:
                stream_log.debug(raw.rstrip('\n'))
            result_list.append(raw)

    worker = threading.Thread(target=_pump)
    self.running_threads.add(worker)
    worker.start()
    return worker
def __init__(self, env=os.environ):
    """Start a Bash process with beakerlib.sh sourced, ready for commands.

    :param env: environment mapping; must contain ``BEAKERLIB``, the
        directory of the BeakerLib installation.
    :raises RuntimeError: when ``BEAKERLIB`` is not set in *env*.

    NOTE(review): the mutable default ``env=os.environ`` is shared across
    calls, and the ``open('/dev/null')`` file objects handed to Popen are
    never explicitly closed — confirm whether either is a problem here.
    """
    self.log = log_mgr.get_logger(self)
    if 'BEAKERLIB' not in env:
        raise RuntimeError('$BEAKERLIB not set, cannot use BeakerLib')
    self.env = env
    # Set up the Bash process
    self.bash = subprocess.Popen(['bash'],
                                 stdin=subprocess.PIPE,
                                 stdout=open('/dev/null', 'w'),
                                 stderr=open('/dev/null', 'w'))
    source_path = os.path.join(self.env['BEAKERLIB'], 'beakerlib.sh')
    self.run_beakerlib_command(['.', source_path])
    # _in_class_setup is set when we are in setup_class, so logs can be
    # collected just before the first test starts
    self._in_class_setup = False
    # Redirect logging to our own handlers
    self.setup_log_handler(BeakerLibLogHandler(self.run_beakerlib_command))
def __init__(self, realm, subsystem, service_desc, dogtag_constants=None,
             host_name=None, dm_password=None, ldapi=True):
    """Initializer

    :param realm: Kerberos realm name.
    :param subsystem: Dogtag subsystem name; lower-cased to build the
        base DN "o=ipa<subsystem>".
    :param service_desc: human-readable description of the service.
    :param dogtag_constants: Dogtag version constants; when None the
        configured constants are used.
    :param host_name: FQDN of the host running this instance.
    :param dm_password: Directory Manager password (forwarded to super()).
    :param ldapi: bind over the LDAPI socket.
    """
    if dogtag_constants is None:
        dogtag_constants = dogtag.configured_constants()
    super(DogtagInstance, self).__init__(
        '%sd' % dogtag_constants.PKI_INSTANCE_NAME,
        service_desc=service_desc,
        dm_password=dm_password,
        ldapi=ldapi
    )
    self.dogtag_constants = dogtag_constants
    self.realm = realm
    # NOTE(review): dm_password is forwarded to super() above but reset
    # to None on the instance here — confirm this is intentional.
    self.dm_password = None
    self.admin_password = None
    self.fqdn = host_name
    self.domain = None
    self.pkcs12_info = None
    self.clone = False
    self.basedn = DN(('o', 'ipa%s' % subsystem.lower()))
    self.admin_user = DN(('uid', 'admin'), ('ou', 'people'), ('o', 'ipaca'))
    # Temporary agent NSS DB, created eagerly;
    # NOTE(review): cleanup of this directory is not visible here.
    self.agent_db = tempfile.mkdtemp(prefix="tmp-")
    self.ds_port = DEFAULT_DSPORT
    self.server_root = dogtag_constants.SERVER_ROOT
    self.subsystem = subsystem
    self.security_domain_name = "IPA"
    # replication parameters
    self.master_host = None
    self.master_replication_port = None
    self.subject_base = None
    self.log = log_mgr.get_logger(self)
def __init__(self, realm):
    """Create a KRA instance bound to *realm*."""
    super(KRAInstance, self).__init__(
        realm=realm,
        subsystem="KRA",
        service_desc="KRA server",
    )
    self.log = log_mgr.get_logger(self)
    # KRA data lives under o=kra,o=ipaca.
    self.basedn = DN(("o", "kra"), ("o", "ipaca"))
import six

from ipalib import api
from ipalib import errors
from ipalib.text import _
from ipapython.ipa_log_manager import log_mgr

if six.PY3:
    # Keep py2-style 'unicode' name working on Python 3.
    unicode = str

__doc__ = _("""
Routines for constructing certificate signing requests using IPA data
and stored templates.
""")

logger = log_mgr.get_logger(__name__)


class IndexableUndefined(jinja2.Undefined):
    # Undefined that tolerates item access: indexing returns another
    # Undefined carrying the same hint/object/name/exception, so chained
    # lookups in templates do not raise immediately.
    def __getitem__(self, key):
        return jinja2.Undefined(
            hint=self._undefined_hint,
            obj=self._undefined_obj,
            name=self._undefined_name,
            exc=self._undefined_exception)


class IPAExtension(jinja2.ext.Extension):
    """Jinja2 extension providing useful features for CSR generation rules."""

    def __init__(self, environment):
        super(IPAExtension, self).__init__(environment)
# but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. """Base class for FreeIPA integration tests""" import pytest from ipapython.ipa_log_manager import log_mgr from ipatests.test_integration import tasks from pytest_sourceorder import ordered log = log_mgr.get_logger(__name__) @ordered @pytest.mark.usefixtures('mh') @pytest.mark.usefixtures('integration_logs') class IntegrationTest(object): num_replicas = 0 num_clients = 0 num_ad_domains = 0 required_extra_roles = [] topology = None domain_level = None @classmethod def setup_class(cls):
def __init__(self, name):
    """Store *name* and attach a logger to this instance."""
    self.name = name
    log_mgr.get_logger(self, True)
} _PARAMS = { 'Decimal': parameters.Decimal, 'DN': parameters.DNParam, 'DNSName': parameters.DNSNameParam, 'Principal': parameters.Principal, 'bool': parameters.Bool, 'bytes': parameters.Bytes, 'datetime': parameters.DateTime, 'dict': parameters.Dict, 'int': parameters.Int, 'str': parameters.Str, } logger = log_mgr.get_logger(__name__) class _SchemaCommand(ClientCommand): pass class _SchemaMethod(ClientMethod): @property def obj_name(self): return self.api.Object[self.obj_full_name].name @property def obj_version(self): return self.api.Object[self.obj_full_name].version
def __init__(self):
    """Attach a logger and start with an empty auth-manager registry."""
    self.auth_managers = {}
    log_mgr.get_logger(self, True)
def get_logger(self, name):
    """Return the IPA log manager's logger for *name*."""
    named_logger = log_mgr.get_logger(name)
    return named_logger
def __init__(self, host):
    """Bind to *host* and set up a host-scoped logger and command counter."""
    self.host = host
    # Logger name: "<host's logger>.<this class>".
    self.logger_name = '{0}.{1}'.format(host.logger_name,
                                        type(self).__name__)
    self.log = log_mgr.get_logger(self.logger_name)
    self._command_index = 0
def __init__(self):
    """Initialize the plugin base class, the setup flag, and logging."""
    super(BeakerLibPlugin, self).__init__()
    self._in_class_setup = False
    self.log = log_mgr.get_logger(self)
def bootstrap(self, parser=None, **overrides):
    """
    Initialize environment variables and logging.

    :param parser: argument parser to install; one is built when omitted.
    :param overrides: environment overrides passed to ``env._bootstrap``.
    """
    self.__doing('bootstrap')
    self.log = log_mgr.get_logger(self)
    self.env._bootstrap(**overrides)
    self.env._finalize_core(**dict(DEFAULT_CONFIG))

    # Add the argument parser
    if not parser:
        parser = self.build_global_parser()
    self.parser = parser

    root_logger = logging.getLogger()

    # If logging has already been configured somewhere else (like in the
    # installer), don't add handlers or change levels:
    if root_logger.handlers or self.env.validate_api:
        return

    if self.env.debug:
        level = logging.DEBUG
    else:
        level = logging.INFO
    root_logger.setLevel(level)

    # Per-logger level filters from env vars of the form
    # log_logger_level_<level> = <comma-separated regexps>.
    for attr in self.env:
        match = re.match(
            r'^log_logger_level_'
            r'(debug|info|warn|warning|error|critical|\d+)$',
            attr)
        if not match:
            continue
        level = ipa_log_manager.convert_log_level(match.group(1))
        value = getattr(self.env, attr)
        regexps = re.split('\s*,\s*', value)
        # Add the regexp, it maps to the configured level
        for regexp in regexps:
            root_logger.addFilter(ipa_log_manager.Filter(regexp, level))

    # Add stderr handler:
    level = logging.INFO
    if self.env.debug:
        level = logging.DEBUG
    else:
        if self.env.context == 'cli':
            if self.env.verbose > 0:
                level = logging.INFO
            else:
                level = logging.WARNING

    handler = logging.StreamHandler()
    handler.setLevel(level)
    handler.setFormatter(ipa_log_manager.Formatter(LOGGING_FORMAT_STDERR))
    root_logger.addHandler(handler)

    # Add file handler:
    if self.env.mode in ('dummy', 'unit_test'):
        return  # But not if in unit-test mode
    if self.env.log is None:
        return
    log_dir = path.dirname(self.env.log)
    if not path.isdir(log_dir):
        try:
            os.makedirs(log_dir)
        except OSError:
            # Best-effort: log and continue without a file handler.
            logger.error('Could not create log_dir %r', log_dir)
            return

    level = logging.INFO
    if self.env.debug:
        level = logging.DEBUG
    try:
        handler = logging.FileHandler(self.env.log)
    except IOError as e:
        logger.error('Cannot open log file %r: %s', self.env.log, e)
        return
    handler.setLevel(level)
    handler.setFormatter(ipa_log_manager.Formatter(LOGGING_FORMAT_FILE))
    root_logger.addHandler(handler)
def bootstrap(self, parser=None, **overrides):
    """
    Initialize environment variables and logging.

    :param parser: argument parser to install; one is built when omitted.
    :param overrides: environment overrides passed to ``env._bootstrap``.
    :raises errors.SystemEncodingError: when the filesystem encoding is
        not UTF-8.
    """
    self.__doing('bootstrap')
    self.log = log_mgr.get_logger(self)
    self.env._bootstrap(**overrides)
    self.env._finalize_core(**dict(DEFAULT_CONFIG))

    # Add the argument parser
    if not parser:
        parser = self.build_global_parser()
    self.parser = parser

    root_logger = logging.getLogger()

    # If logging has already been configured somewhere else (like in the
    # installer), don't add handlers or change levels:
    if root_logger.handlers or self.env.validate_api:
        return

    if self.env.debug:
        level = logging.DEBUG
    else:
        level = logging.INFO
    root_logger.setLevel(level)

    # Per-logger level filters from env vars of the form
    # log_logger_level_<level> = <comma-separated regexps>.
    for attr in self.env:
        match = re.match(r'^log_logger_level_'
                         r'(debug|info|warn|warning|error|critical|\d+)$',
                         attr)
        if not match:
            continue
        level = ipa_log_manager.convert_log_level(match.group(1))
        value = getattr(self.env, attr)
        regexps = re.split('\s*,\s*', value)
        # Add the regexp, it maps to the configured level
        for regexp in regexps:
            root_logger.addFilter(ipa_log_manager.Filter(regexp, level))

    # Add stderr handler:
    level = logging.INFO
    if self.env.debug:
        level = logging.DEBUG
    else:
        if self.env.context == 'cli':
            if self.env.verbose > 0:
                level = logging.INFO
            else:
                level = logging.WARNING

    handler = logging.StreamHandler()
    handler.setLevel(level)
    handler.setFormatter(ipa_log_manager.Formatter(LOGGING_FORMAT_STDERR))
    root_logger.addHandler(handler)

    # check after logging is set up but before we create files.
    fse = sys.getfilesystemencoding()
    if fse.lower() not in {'utf-8', 'utf8'}:
        raise errors.SystemEncodingError(encoding=fse)

    # Add file handler:
    if self.env.mode in ('dummy', 'unit_test'):
        return  # But not if in unit-test mode
    if self.env.log is None:
        return
    log_dir = path.dirname(self.env.log)
    if not path.isdir(log_dir):
        try:
            os.makedirs(log_dir)
        except OSError:
            # Best-effort: log and continue without a file handler.
            logger.error('Could not create log_dir %r', log_dir)
            return

    level = logging.INFO
    if self.env.debug:
        level = logging.DEBUG
    try:
        handler = logging.FileHandler(self.env.log)
    except IOError as e:
        logger.error('Cannot open log file %r: %s', self.env.log, e)
        return
    handler.setLevel(level)
    handler.setFormatter(ipa_log_manager.Formatter(LOGGING_FORMAT_FILE))
    root_logger.addHandler(handler)
def __init__(self, dm_password=None, sub_dict={}, online=True, ldapi=False):
    '''
    :parameters:
        dm_password
            Directory Manager password
        sub_dict
            substitution dictionary
        online
            Do an online LDAP update or use an experimental LDIF updater
        ldapi
            Bind using ldapi. This assumes autobind is enabled.

    Data Structure Example:
    -----------------------

    dn_by_rdn_count = {
        3: 'cn=config,dc=example,dc=com':
        4: 'cn=bob,ou=people,dc=example,dc=com',
    }

    all_updates = [
        {
            'dn': 'cn=config,dc=example,dc=com',
            'default': [
                dict(attr='attr1', value='default1'),
            ],
            'updates': [
                dict(action='action', attr='attr1', value='value1'),
                dict(action='replace', attr='attr2', value=['old', 'new']),
            ]
        },
        {
            'dn': 'cn=bob,ou=people,dc=example,dc=com',
            'default': [
                dict(attr='attr3', value='default3'),
            ],
            'updates': [
                dict(action='action', attr='attr3', value='value3'),
                dict(action='action', attr='attr4', value='value4'),
            ]
        }
    ]

    Please notice the replace action requires two values in list

    The default and update lists are "dispositions"

    Plugins:

    Plugins has to be specified in update file to be executed, using
    'plugin' directive

    Example:
    plugin: update_uniqueness_plugins_to_new_syntax

    Each plugin returns two values:
    1. restart: dirsrv will be restarted AFTER this update is applied.
    2. updates: A list of updates to be applied.

    The value of an update is a dictionary with the following possible
    values:
      - dn: DN, equal to the dn attribute
      - updates: list of updates against the dn
      - default: list of the default entry to be added if it doesn't
                 exist
      - deleteentry: list of dn's to be deleted (typically single dn)

    For example, this update file:

      dn: cn=global_policy,cn=$REALM,cn=kerberos,$SUFFIX
      replace:krbPwdLockoutDuration:10::600
      replace: krbPwdMaxFailure:3::6

    Generates this list which contain the update dictionary:

    [
      {
        'dn': 'cn=global_policy,cn=EXAMPLE.COM,cn=kerberos,dc=example,dc=com',
        'updates': [
          dict(action='replace', attr='krbPwdLockoutDuration',
               value=['10','600']),
          dict(action='replace', attr='krbPwdMaxFailure', value=['3','6']),
        ]
      }
    ]

    Here is another example showing how a default entry is configured:

      dn: cn=Managed Entries,cn=etc,$SUFFIX
      default: objectClass: nsContainer
      default: objectClass: top
      default: cn: Managed Entries

    This generates:

    [
      {
        'dn': 'cn=Managed Entries,cn=etc,dc=example,dc=com',
        'default': [
          dict(attr='objectClass', value='nsContainer'),
          dict(attr='objectClass', value='top'),
          dict(attr='cn', value='Managed Entries'),
        ]
      }
    ]

    Note that the variable substitution in both examples has been
    completed.

    Either may make changes directly in LDAP or can return updates in
    update format.
    '''
    log_mgr.get_logger(self, True)
    # NOTE(review): the mutable default sub_dict={} is shared across
    # instances and is mutated below — confirm callers always pass one.
    self.sub_dict = sub_dict
    self.dm_password = dm_password
    self.conn = None
    self.modified = False
    self.online = online
    self.ldapi = ldapi
    self.pw_name = pwd.getpwuid(os.geteuid()).pw_name
    self.realm = None
    # LDAPI socket path derived from the realm, dots replaced by dashes.
    self.socket_name = (
        paths.SLAPD_INSTANCE_SOCKET_TEMPLATE %
        api.env.realm.replace('.', '-')
    )
    self.ldapuri = 'ldapi://%s' % ipautil.format_netloc(
        self.socket_name
    )
    suffix = None

    # Realm comes from the substitution dict when given, else from the
    # API environment; the LDAP suffix is derived from it.
    if sub_dict.get("REALM"):
        self.realm = sub_dict["REALM"]
    else:
        self.realm = api.env.realm
    suffix = ipautil.realm_to_suffix(self.realm) if self.realm else None

    if suffix is not None:
        assert isinstance(suffix, DN)
    domain = ipautil.get_domain_name()
    libarch = self._identify_arch()

    fqdn = installutils.get_fqdn()
    if fqdn is None:
        raise RuntimeError("Unable to determine hostname")

    # Fill in any substitution variables the caller did not supply.
    if not self.sub_dict.get("REALM") and self.realm is not None:
        self.sub_dict["REALM"] = self.realm
    if not self.sub_dict.get("FQDN"):
        self.sub_dict["FQDN"] = fqdn
    if not self.sub_dict.get("DOMAIN"):
        self.sub_dict["DOMAIN"] = domain
    if not self.sub_dict.get("SUFFIX") and suffix is not None:
        self.sub_dict["SUFFIX"] = suffix
    if not self.sub_dict.get("ESCAPED_SUFFIX"):
        self.sub_dict["ESCAPED_SUFFIX"] = str(suffix)
    if not self.sub_dict.get("LIBARCH"):
        self.sub_dict["LIBARCH"] = libarch
    if not self.sub_dict.get("TIME"):
        self.sub_dict["TIME"] = int(time.time())
    if not self.sub_dict.get("DOMAIN") and domain is not None:
        self.sub_dict["DOMAIN"] = domain
    if not self.sub_dict.get("MIN_DOMAIN_LEVEL"):
        self.sub_dict["MIN_DOMAIN_LEVEL"] = str(constants.MIN_DOMAIN_LEVEL)
    if not self.sub_dict.get("MAX_DOMAIN_LEVEL"):
        self.sub_dict["MAX_DOMAIN_LEVEL"] = str(constants.MAX_DOMAIN_LEVEL)
    if not self.sub_dict.get("STRIP_ATTRS"):
        self.sub_dict["STRIP_ATTRS"] = "%s" % (
            " ".join(constants.REPL_AGMT_STRIP_ATTRS),)
    if not self.sub_dict.get("EXCLUDES"):
        self.sub_dict["EXCLUDES"] = "(objectclass=*) $ EXCLUDE %s" % (
            " ".join(constants.REPL_AGMT_EXCLUDES),)
    if not self.sub_dict.get("TOTAL_EXCLUDES"):
        self.sub_dict["TOTAL_EXCLUDES"] = "(objectclass=*) $ EXCLUDE " + \
            " ".join(constants.REPL_AGMT_TOTAL_EXCLUDES)
    # Private in-server API used to apply the updates.
    self.api = create_api(mode=None)
    self.api.bootstrap(in_server=True, context='updates')
    self.api.finalize()
    if online:
        # Try out the connection/password
        # (This will raise if the server is not available)
        self.create_connection()
        self.close_connection()
    else:
        raise RuntimeError("Offline updates are not supported.")