示例#1
0
    def __spawn_instance(self):
        """
        Create and configure a new KRA instance using pkispawn.

        Builds an IPA-specific pkispawn configuration file, writes it to a
        secured temporary file, and delegates to the base class to run
        pkispawn.  All temporary files (spawn config, admin PKCS #12 file,
        clone PKCS #12 copy) are removed afterwards, and the exported backup
        keys are moved into their final location on success.
        """

        # Create an empty and secured file
        (cfg_fd, cfg_file) = tempfile.mkstemp()
        os.close(cfg_fd)
        pent = pwd.getpwnam(self.service_user)
        os.chown(cfg_file, pent.pw_uid, pent.pw_gid)
        self.tmp_agent_db = tempfile.mkdtemp(prefix="tmp-",
                                             dir=paths.VAR_LIB_IPA)
        tmp_agent_pwd = ipautil.ipa_generate_password()

        # Create a temporary file for the admin PKCS #12 file
        (admin_p12_fd, admin_p12_file) = tempfile.mkstemp()
        os.close(admin_p12_fd)

        # Create KRA configuration
        config = RawConfigParser()
        config.optionxform = str
        config.add_section("KRA")

        # Security Domain Authentication
        config.set("KRA", "pki_security_domain_https_port", "443")
        config.set("KRA", "pki_security_domain_password", self.admin_password)
        config.set("KRA", "pki_security_domain_user", self.admin_user)

        # issuing ca
        config.set("KRA", "pki_issuing_ca_uri",
                   "https://%s" % ipautil.format_netloc(self.fqdn, 443))

        # Server
        config.set("KRA", "pki_enable_proxy", "True")
        config.set("KRA", "pki_restart_configured_instance", "False")
        config.set("KRA", "pki_backup_keys", "True")
        config.set("KRA", "pki_backup_password", self.admin_password)

        # Client security database
        config.set("KRA", "pki_client_database_dir", self.tmp_agent_db)
        config.set("KRA", "pki_client_database_password", tmp_agent_pwd)
        config.set("KRA", "pki_client_database_purge", "True")
        config.set("KRA", "pki_client_pkcs12_password", self.admin_password)

        # Administrator
        config.set("KRA", "pki_admin_name", self.admin_user)
        config.set("KRA", "pki_admin_uid", self.admin_user)
        config.set("KRA", "pki_admin_email", "root@localhost")
        config.set("KRA", "pki_admin_password", self.admin_password)
        config.set("KRA", "pki_admin_nickname", "ipa-ca-agent")
        config.set("KRA", "pki_admin_subject_dn",
                   str(DN(('cn', 'ipa-ca-agent'), self.subject_base)))
        config.set("KRA", "pki_import_admin_cert", "False")
        config.set("KRA", "pki_client_admin_cert_p12", admin_p12_file)

        # Directory server
        config.set("KRA", "pki_ds_ldap_port", "389")
        config.set("KRA", "pki_ds_password", self.dm_password)
        config.set("KRA", "pki_ds_base_dn", six.text_type(self.basedn))
        config.set("KRA", "pki_ds_database", "ipaca")
        config.set("KRA", "pki_ds_create_new_db", "False")

        self._use_ldaps_during_spawn(config)

        # Certificate subject DNs
        config.set("KRA", "pki_subsystem_subject_dn",
                   str(DN(('cn', 'CA Subsystem'), self.subject_base)))
        config.set("KRA", "pki_ssl_server_subject_dn",
                   str(DN(('cn', self.fqdn), self.subject_base)))
        config.set("KRA", "pki_audit_signing_subject_dn",
                   str(DN(('cn', 'KRA Audit'), self.subject_base)))
        config.set(
            "KRA", "pki_transport_subject_dn",
            str(DN(('cn', 'KRA Transport Certificate'), self.subject_base)))
        config.set(
            "KRA", "pki_storage_subject_dn",
            str(DN(('cn', 'KRA Storage Certificate'), self.subject_base)))

        # Certificate nicknames
        # Note that both the server certs and subsystem certs reuse
        # the ca certs.
        config.set("KRA", "pki_subsystem_nickname",
                   "subsystemCert cert-pki-ca")
        config.set("KRA", "pki_ssl_server_nickname", "Server-Cert cert-pki-ca")
        config.set("KRA", "pki_audit_signing_nickname",
                   "auditSigningCert cert-pki-kra")
        config.set("KRA", "pki_transport_nickname",
                   "transportCert cert-pki-kra")
        config.set("KRA", "pki_storage_nickname", "storageCert cert-pki-kra")

        # Shared db settings
        # Needed because CA and KRA share the same database
        # We will use the dbuser created for the CA
        config.set("KRA", "pki_share_db", "True")
        config.set(
            "KRA", "pki_share_dbuser_dn",
            str(DN(('uid', 'pkidbuser'), ('ou', 'people'), ('o', 'ipaca'))))

        if not (os.path.isdir(paths.PKI_TOMCAT_ALIAS_DIR)
                and os.path.isfile(paths.PKI_TOMCAT_PASSWORD_CONF)):
            # generate pin which we know can be used for FIPS NSS database
            pki_pin = ipautil.ipa_generate_password()
            config.set("KRA", "pki_pin", pki_pin)
        else:
            pki_pin = None

        p12_tmpfile_fd, p12_tmpfile_name = tempfile.mkstemp(dir=paths.TMP)
        # mkstemp() returns an open OS-level file descriptor; close it
        # immediately so it is not leaked (the file itself is kept and is
        # removed in the finally block below).
        os.close(p12_tmpfile_fd)

        if self.clone:
            krafile = self.pkcs12_info[0]
            shutil.copy(krafile, p12_tmpfile_name)
            pent = pwd.getpwnam(self.service_user)
            os.chown(p12_tmpfile_name, pent.pw_uid, pent.pw_gid)

            # Security domain registration
            config.set("KRA", "pki_security_domain_hostname", self.fqdn)
            config.set("KRA", "pki_security_domain_https_port", "443")
            config.set("KRA", "pki_security_domain_user", self.admin_user)
            config.set("KRA", "pki_security_domain_password",
                       self.admin_password)

            # Clone
            config.set("KRA", "pki_clone", "True")
            config.set("KRA", "pki_clone_pkcs12_path", p12_tmpfile_name)
            config.set("KRA", "pki_clone_pkcs12_password", self.dm_password)
            config.set("KRA", "pki_clone_setup_replication", "False")
            config.set(
                "KRA", "pki_clone_uri",
                "https://%s" % ipautil.format_netloc(self.master_host, 443))
        else:
            # the admin cert file is needed for the first instance of KRA
            cert = self.get_admin_cert()
            # First make sure that the directory exists
            parentdir = os.path.dirname(paths.ADMIN_CERT_PATH)
            if not os.path.exists(parentdir):
                os.makedirs(parentdir)
            with open(paths.ADMIN_CERT_PATH, "wb") as admin_path:
                admin_path.write(
                    base64.b64encode(cert.public_bytes(x509.Encoding.DER)))

        # Generate configuration file
        with open(cfg_file, "w") as f:
            config.write(f)

        try:
            DogtagInstance.spawn_instance(self,
                                          cfg_file,
                                          nolog_list=(self.dm_password,
                                                      self.admin_password,
                                                      pki_pin, tmp_agent_pwd))
        finally:
            # Always clean up the secrets-bearing temporary files, even when
            # pkispawn fails.
            os.remove(p12_tmpfile_name)
            os.remove(cfg_file)
            os.remove(admin_p12_file)

        shutil.move(paths.KRA_BACKUP_KEYS_P12, paths.KRACERT_P12)
        logger.debug("completed creating KRA instance")
示例#2
0
# Copyright 2018 CTTC www.cttc.es
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# python imports
from six.moves.configparser import RawConfigParser

# Load the MongoDB host/port shared by all db modules.
# NOTE(review): the properties path is relative to the current working
# directory, not to this file -- assumes the process is started from a
# fixed location; confirm against the deployment layout.
config = RawConfigParser()
config.read("../../db/db.properties")  # a missing file is silently ignored by read()
db_ip = config.get("MongoDB", "db.ip")  # raises NoSectionError if the file was not found
db_port = int(config.get("MongoDB", "db.port"))
示例#3
0
def read_config(filename):
    """Read the config file called *filename*.

    Each section becomes a dict of its options with defaults filled in.
    Sections lacking a 'providers' or 'destination' item (or lacking both
    a 'topic' and an enabled heartbeat) are logged and dropped.
    """
    cfg = RawConfigParser()
    cfg.read(filename)

    off_values = ["", "False", "false", "0", "off"]
    on_values = ["True", "true", "on", "1"]

    sections = {}

    for name in cfg.sections():
        conf = dict(cfg.items(name))

        # Normalise the 'delete' flag to a real boolean where possible.
        conf.setdefault("delete", False)
        if conf["delete"] in off_values:
            conf["delete"] = False
        if conf["delete"] in on_values:
            conf["delete"] = True

        # Fill in defaults for the optional settings.
        defaults = {
            "working_directory": None,
            "compression": False,
            "xritdecompressor": None,
            "heartbeat": True,
            "req_timeout": DEFAULT_REQ_TIMEOUT,
            "transfer_req_timeout": 10 * DEFAULT_REQ_TIMEOUT,
            "nameservers": None,
        }
        for key, value in defaults.items():
            conf.setdefault(key, value)

        # Only explicit "off" strings disable the heartbeat.
        if conf["heartbeat"] in off_values:
            conf["heartbeat"] = False

        if "providers" not in conf:
            LOGGER.warning("Incomplete section %s: add an 'providers' item.",
                           name)
            LOGGER.info("Ignoring section %s: incomplete.", name)
            continue
        # Keep only the host part and prefix with the tcp scheme.
        conf["providers"] = ["tcp://" + item.split('/', 1)[0]
                             for item in conf["providers"].split()]

        if "destination" not in conf:
            LOGGER.warning("Incomplete section %s: add an 'destination' item.",
                           name)
            LOGGER.info("Ignoring section %s: incomplete.", name)
            continue

        if "topic" in conf:
            try:
                conf["publish_port"] = int(conf["publish_port"])
            except (KeyError, ValueError):
                conf["publish_port"] = 0
        elif not conf["heartbeat"]:
            # We have no topics and therefor no subscriber (if you want to
            # subscribe everything, then explicit specify an empty topic).
            LOGGER.warning(
                "Incomplete section %s: add an 'topic' "
                "item or enable heartbeat.", name)
            LOGGER.info("Ignoring section %s: incomplete.", name)
            continue

        sections[name] = conf

    return sections
示例#4
0
def main():
    '''Main(). Commandline parsing and stalker startup.

    Command line arguments override values from the optional
    configuration file.  Starts the pyinotify-based notifier and blocks
    until interrupted.
    '''

    print("Setting timezone to UTC")
    os.environ["TZ"] = "UTC"
    time.tzset()

    parser = argparse.ArgumentParser()

    parser.add_argument("-d",
                        "--monitored_dirs",
                        dest="monitored_dirs",
                        nargs='+',
                        type=str,
                        default=[],
                        help="Names of the monitored directories "
                        "separated by space")
    parser.add_argument("-p",
                        "--posttroll_port",
                        dest="posttroll_port",
                        default=0,
                        type=int,
                        help="Local port where messages are published")
    parser.add_argument("-t",
                        "--topic",
                        dest="topic",
                        type=str,
                        default=None,
                        help="Topic of the sent messages")
    parser.add_argument("-c",
                        "--configuration_file",
                        type=str,
                        help="Name of the config.ini configuration file")
    parser.add_argument("-C",
                        "--config_item",
                        type=str,
                        help="Name of the configuration item to use")
    parser.add_argument("-e",
                        "--event_names",
                        type=str,
                        default=None,
                        help="Name of the pyinotify events to monitor")
    parser.add_argument("-f",
                        "--filepattern",
                        type=str,
                        help="Filepattern used to parse "
                        "satellite/orbit/date/etc information")
    parser.add_argument("-i",
                        "--instrument",
                        type=str,
                        default=None,
                        help="Instrument name in the satellite")
    parser.add_argument("-n",
                        "--nameservers",
                        type=str,
                        default=None,
                        help="Posttroll nameservers to register own address,"
                        " otherwise multicasting is used")

    if len(sys.argv) <= 1:
        parser.print_help()
        sys.exit()
    else:
        args = parser.parse_args()

    # Parse commandline arguments.  If args are given, they override
    # the configuration file.

    # Check first commandline arguments
    monitored_dirs = args.monitored_dirs
    if monitored_dirs == '':
        monitored_dirs = None

    posttroll_port = args.posttroll_port
    topic = args.topic
    event_names = args.event_names
    instrument = args.instrument
    nameservers = args.nameservers

    filepattern = args.filepattern
    if args.filepattern == '':
        filepattern = None

    # Defaults for options that can only come from the configuration file.
    # Without these, running with command line options alone raised a
    # NameError at the create_notifier() call below.
    aliases = None
    tbus_orbit = False
    history = 0
    granule_length = 0.0
    custom_vars = None

    if args.configuration_file is not None:
        config_fname = args.configuration_file

        if "template" in config_fname:
            print("Template file given as trollstalker logging config,"
                  " aborting!")
            sys.exit()

        config = RawConfigParser()
        config.read(config_fname)
        config = OrderedDict(config.items(args.config_item))
        config['name'] = args.configuration_file

        topic = topic or config['topic']
        monitored_dirs = monitored_dirs or config['directory'].split(",")
        try:
            posttroll_port = posttroll_port or int(config['posttroll_port'])
        except (KeyError, ValueError):
            if posttroll_port is None:
                posttroll_port = 0
        # Guarded lookup: 'filepattern' is optional in the config file
        # (the previous unguarded read raised KeyError when it was absent).
        try:
            filepattern = filepattern or config['filepattern']
        except KeyError:
            pass
        try:
            event_names = event_names or config['event_names']
        except KeyError:
            pass
        try:
            instrument = instrument or config['instruments']
        except KeyError:
            pass
        try:
            history = int(config['history'])
        except KeyError:
            history = 0

        try:
            nameservers = nameservers or config['nameservers']
        except KeyError:
            nameservers = []

        aliases = helper_functions.parse_aliases(config)
        tbus_orbit = bool(config.get("tbus_orbit", False))

        granule_length = float(config.get("granule", 0))

        custom_vars = parse_vars(config)

        try:
            log_config = config["stalker_log_config"]
        except KeyError:
            # No dedicated log config: derive the level from 'loglevel',
            # falling back to DEBUG when missing or not a logging constant.
            try:
                loglevel = getattr(logging, config["loglevel"])
            except (AttributeError, KeyError):
                loglevel = logging.DEBUG
            LOGGER.setLevel(loglevel)

            strhndl = logging.StreamHandler()
            strhndl.setLevel(loglevel)
            log_format = "[%(asctime)s %(levelname)-8s %(name)s] %(message)s"
            formatter = logging.Formatter(log_format)

            strhndl.setFormatter(formatter)
            LOGGER.addHandler(strhndl)
        else:
            logging.config.fileConfig(log_config)

    event_names = event_names or 'IN_CLOSE_WRITE,IN_MOVED_TO'

    LOGGER.debug("Logger started")

    if type(monitored_dirs) is not list:
        monitored_dirs = [monitored_dirs]

    if nameservers:
        nameservers = nameservers.split(',')
    else:
        nameservers = []

    # Start watching for new files
    notifier = create_notifier(topic,
                               instrument,
                               posttroll_port,
                               filepattern,
                               event_names,
                               monitored_dirs,
                               aliases=aliases,
                               tbus_orbit=tbus_orbit,
                               history=history,
                               granule_length=granule_length,
                               custom_vars=custom_vars,
                               nameservers=nameservers)
    notifier.start()

    try:
        # Sleep-loop forever; the notifier works in its own thread.
        while True:
            time.sleep(6000000)
    except KeyboardInterrupt:
        LOGGER.info("Interupting TrollStalker")
    finally:
        notifier.stop()
示例#5
0
def load_config(environ):
    """Load configuration options

    Options are read from a config file. The config file location is
    controlled by the PythonOption ConfigFile in the httpd config.

    Backwards compatibility:
        - if ConfigFile is not set, opts are loaded from http config
        - if ConfigFile is set, then the http config must not provide Koji options
        - In a future version we will load the default hub config regardless
        - all PythonOptions (except ConfigFile) are now deprecated and support for them
          will disappear in a future version of Koji

    Returns a dict mapping option names to typed values, including a
    'policy' sub-dict.  Also mutates module-level koji.BASEDIR and
    koji.pathinfo.topdir when KojiDir is configured.
    """
    # NOTE(review): 'logger' is bound here but not used in this function.
    logger = logging.getLogger("koji")
    #get our config file(s): optional conf.d directory first, then the main
    #config file last so that it takes precedence
    cf = environ.get('koji.hub.ConfigFile', '/etc/koji-hub/hub.conf')
    cfdir = environ.get('koji.hub.ConfigDir', '/etc/koji-hub/hub.conf.d')
    if cfdir:
        configs = koji.config_directory_contents(cfdir)
    else:
        configs = []
    if cf and os.path.isfile(cf):
        configs.append(cf)
    if configs:
        config = RawConfigParser()
        config.read(configs)
    else:
        # No config files found: every option below falls back to its default.
        config = None
    # Table of all supported [hub] options with their type and default value.
    cfgmap = [
        #option, type, default
        ['DBName', 'string', None],
        ['DBUser', 'string', None],
        ['DBHost', 'string', None],
        ['DBhost', 'string', None],   # alias for backwards compatibility
        ['DBPort', 'integer', None],
        ['DBPass', 'string', None],
        ['KojiDir', 'string', None],

        ['AuthPrincipal', 'string', None],
        ['AuthKeytab', 'string', None],
        ['ProxyPrincipals', 'string', ''],
        ['HostPrincipalFormat', 'string', None],

        ['DNUsernameComponent', 'string', 'CN'],
        ['ProxyDNs', 'string', ''],

        ['CheckClientIP', 'boolean', True],

        ['LoginCreatesUser', 'boolean', True],
        ['KojiWebURL', 'string', 'http://localhost.localdomain/koji'],
        ['EmailDomain', 'string', None],
        ['NotifyOnSuccess', 'boolean', True],
        ['DisableNotifications', 'boolean', False],

        ['Plugins', 'string', ''],
        ['PluginPath', 'string', '/usr/lib/koji-hub-plugins'],

        ['KojiDebug', 'boolean', False],
        ['KojiTraceback', 'string', None],
        ['VerbosePolicy', 'boolean', False],
        ['EnableFunctionDebug', 'boolean', False],

        ['LogLevel', 'string', 'WARNING'],
        ['LogFormat', 'string', '%(asctime)s [%(levelname)s] m=%(method)s u=%(user_name)s p=%(process)s r=%(remoteaddr)s %(name)s: %(message)s'],

        ['MissingPolicyOk', 'boolean', True],
        ['EnableMaven', 'boolean', False],
        ['EnableWin', 'boolean', False],

        ['RLIMIT_AS', 'string', None],
        ['RLIMIT_CORE', 'string', None],
        ['RLIMIT_CPU', 'string', None],
        ['RLIMIT_DATA', 'string', None],
        ['RLIMIT_FSIZE', 'string', None],
        ['RLIMIT_MEMLOCK', 'string', None],
        ['RLIMIT_NOFILE', 'string', None],
        ['RLIMIT_NPROC', 'string', None],
        ['RLIMIT_OFILE', 'string', None],
        ['RLIMIT_RSS', 'string', None],
        ['RLIMIT_STACK', 'string', None],

        ['MemoryWarnThreshold', 'integer', 5000],
        ['MaxRequestLength', 'integer', 4194304],

        ['LockOut', 'boolean', False],
        ['ServerOffline', 'boolean', False],
        ['OfflineMessage', 'string', None],
    ]
    opts = {}
    # Pull each option from the [hub] section with the declared type,
    # falling back to the table's default when absent.
    for name, dtype, default in cfgmap:
        key = ('hub', name)
        if config and config.has_option(*key):
            if dtype == 'integer':
                opts[name] = config.getint(*key)
            elif dtype == 'boolean':
                opts[name] = config.getboolean(*key)
            else:
                opts[name] = config.get(*key)
            continue
        opts[name] = default
    # Honor the legacy 'DBhost' spelling when 'DBHost' is not set.
    if opts['DBHost'] is None:
        opts['DBHost'] = opts['DBhost']
    # load policies
    # (only from config file)
    if config and config.has_section('policy'):
        #for the moment, we simply transfer the policy conf to opts
        opts['policy'] = dict(config.items('policy'))
    else:
        opts['policy'] = {}
    # Fill in any policies not overridden by the config file.
    for pname, text in six.iteritems(_default_policies):
        opts['policy'].setdefault(pname, text)
    # use configured KojiDir
    if opts.get('KojiDir') is not None:
        koji.BASEDIR = opts['KojiDir']
        koji.pathinfo.topdir = opts['KojiDir']
    return opts
    opts = parser.parse_args()

    inifile = opts.inifile

    # check that inifile contains full absolute path
    if not os.path.isabs(inifile):
        print(
            "Error... must supply the full absolute path of the configuration file.",
            file=sys.stderr)
        sys.exit(1)

    startcron = False  # variable to say whether to create the crontab (if this is the first time the script is run then this will be changed to True later)
    cronid = 'knopeJob'  # default ID for the crontab job

    # open and parse config file
    cp = RawConfigParser()
    try:
        cp.read(inifile)
    except:
        print("Error... cannot parse configuration file '%s'" % inifile,
              file=sys.stderr)
        sys.exit(1)

    # if configuration file has previous_endtimes option then the cronjob must have started
    if not cp.has_option(
            'times',
            'previous_endtimes'):  # make sure to start the crontab job
        startcron = True

    # open and parse the run configuration file
    if cp.has_option('configuration', 'file'):
示例#7
0
    try:
        return cfg.get(a, b)
    except:
        return c


# Platform-specific application directories for 'cardmode'.
DIR = appdirs.AppDirs('cardmode')

# ENSURE PROGRAM DIRS
for progdir in (DIR.user_config_dir, ):
    try:
        os.makedirs(progdir)
    except OSError:
        # Directory already exists (or cannot be created) -- best effort.
        pass

# Module-global parser that will hold the loaded configuration.
cfg = RawConfigParser()

# LOAD CONFIG
CONFIG_FN = os.path.join(DIR.user_config_dir, 'config.ini')
try:
    cfg.readfp(open(CONFIG_FN))
except:
    open(CONFIG_FN, 'a+').close()
    os.chmod(CONFIG_FN, 0o600)  # u=rw
    with open(CONFIG_FN, 'wb') as cfgfile:
        tmpcfg = ConfigParser()
        tmpcfg.add_section('defaults')
        tmpcfg.set('defaults', 'service', 'trello')
        tmpcfg.set('defaults', 'board', '')
        tmpcfg.add_section('trello')
        tmpcfg.set('trello', 'apikey', '')
示例#8
0
 def __init__(self, data):
     """Tokenize *data*, parsing it as INI text into ``self.config``.

     The raw string is also passed through to the base tokenizer.
     """
     super(INITokenizer, self).__init__(data)
     self.config = RawConfigParser()
     f = StringIO(data)
     # NOTE(review): readfp() is deprecated in Python 3 (removed in 3.12);
     # read_file() is the modern equivalent -- presumably kept here for
     # Python 2 compatibility (six.moves is in use). Confirm before changing.
     self.config.readfp(f)
示例#9
0
import os
import os.path

from six.moves.configparser import RawConfigParser, NoOptionError

from trollsift import Parser, compose
from pytroll_collectors import trigger
from pytroll_collectors import region_collector
from posttroll import message, publisher
try:
    from satpy.resample import get_area_def
except ImportError:
    from mpop.projector import get_area_def

LOGGER = logging.getLogger(__name__)
CONFIG = RawConfigParser()
PUB = None


def get_metadata(fname):
    """Parse metadata from the file."""
    res = None
    for section in CONFIG.sections():
        try:
            parser = Parser(CONFIG.get(section, "pattern"))
        except NoOptionError:
            continue
        if not parser.validate(fname):
            continue
        res = parser.parse(fname)
        res.update(dict(CONFIG.items(section)))
示例#10
0
def parse_module_definition(mod_info):
    """Collect APOLLO module definitions and locate their databases.

    Walks the module directory (module-global ``mod_dir``) for ``*.txt``
    definition files, records each module/database pair in *mod_info*
    (mutated in place, key: ``"<module path>#<database>"``), then walks
    the data directory (``data_dir``) for matching database files and
    runs each supported module via ``run_module``.
    """

    database_names = set()

    for root, dirs, filenames in os.walk(mod_dir):
        for f in filenames:
            if f.endswith(".txt"):
                mod_def = os.path.join(root, f)

                # (The previous version also opened and read the file by
                # hand, leaking the file handle; the parser read is enough.)
                parser = RawConfigParser()
                parser.read(mod_def)

                query_name = parser.get('Query Metadata', 'QUERY_NAME')
                database_name = parser.get('Database Metadata', 'DATABASE').split(',')
                activity = parser.get('Query Metadata', 'ACTIVITY')
                key_timestamp = parser.get('Query Metadata', 'KEY_TIMESTAMP')

                for database in database_name:
                    database_names.add(database)

                for db in database_name:
                    uniquekey = mod_def + "#" + db
                    mod_info[uniquekey] = []

                # NOTE(review): 'uniquekey' and 'db' below refer to the
                # *last* database from the loop above, so only that entry
                # is filled in. Confirm whether every listed database was
                # meant to receive the query; kept as-is to preserve
                # existing behaviour.
                if version == 'yolo':
                    for section in parser.sections():
                        try:
                            if "SQL Query" in section:
                                sql_query = parser.get(section, 'QUERY')
                                mod_info[uniquekey] = [query_name, db, activity, key_timestamp, sql_query]
                        except Exception:
                            pass
                else:
                    for section in parser.sections():
                        try:
                            if version in section:
                                sql_query = parser.get(section, 'QUERY')
                                mod_info[uniquekey] = [query_name, db, activity, key_timestamp, sql_query]
                        except Exception:
                            pass

    print("\n==> Parsing", len(mod_info), "modules (Note: Some modules may be run on more than one database.)")

    count = 1
    modules = set()

    for item in sorted(mod_info):
        dbs = item.split('#')
        for mod in dbs:
            modules.add(dbs[0])
        print("\t[" + str(count) + "] " + str(dbs[0]) + " on " + str(dbs[1]))
        count = count + 1

    print("\n==> Will lazily run APOLLO on " + str(len(modules)) + " unique modules and " + str(len(database_names)) + " unique databases.")

    print("\n==> Searching for database files...this may take a hot minute...")
    print()
    # Attach every matching database file path to its module entry.
    for root, dirs, filenames in os.walk(data_dir):
        for f in filenames:
            if f in database_names:
                for mod_def, mod_data in mod_info.items():
                    if mod_data:
                        if mod_data[1] == f:
                            mod_info[mod_def].append(os.path.join(root, f))

    # Entries past index 4 are the database file paths appended above.
    for mod_def, mod_data in mod_info.items():
        mod_def_split = mod_def.split('#')
        if mod_data:
            print(mod_def_split[0] + " on " + mod_def_split[1], ":", len(mod_data)-5, "databases.")
            run_module(mod_def, mod_data[0], mod_data[5:], mod_data[2], mod_data[3], mod_data[4])
            print()
        else:
            print(mod_def_split[0] + " on " + mod_def_split[1], ": Module not supported for version of data provided.")
            print()
except ImportError:
    from urllib.parse import urlparse
import posttroll.subscriber
from posttroll.publisher import Publish
import xml.etree.ElementTree as ET
from datetime import datetime
import os.path

from trollsched.satpass import Pass
from trollsched.drawing import save_fig
from trollsched import (SATELLITE_NAMES, INSTRUMENT)

LOG = logging.getLogger(__name__)

# Configuration directory; defaults to the current working directory when
# PYTROLL_SCHEDULE_CONFIG_DIR is not set in the environment.
CFG_DIR = os.environ.get('PYTROLL_SCHEDULE_CONFIG_DIR', './')
CONF = RawConfigParser()
CFG_FILE = os.path.join(CFG_DIR, "pytroll_schedule_config.cfg")
LOG.debug("Config file = " + str(CFG_FILE))
if not os.path.exists(CFG_FILE):
    # Fail fast at import time -- nothing below can work without the config.
    raise IOError('Config file %s does not exist!' % CFG_FILE)

CONF.read(CFG_FILE)
# Flatten the [DEFAULT] section into a plain dict for easy lookup.
OPTIONS = {}
for option, value in CONF.items("DEFAULT"):
    OPTIONS[option] = value

#: Default time format
_DEFAULT_TIME_FORMAT = '%Y-%m-%d %H:%M:%S'

#: Default log format
_DEFAULT_LOG_FORMAT = '[%(levelname)s: %(asctime)s : %(name)s] %(message)s'
示例#12
0
class TestImageScaler(unittest.TestCase):
    """Unit tests for the image scaler module (imported as ``sca``).

    Class-level fixtures: random 100x100 grayscale data, an alpha channel
    with a transparent top-left corner, and PIL images in L, LA, RGB and
    RGBA modes, plus the test configuration read from
    ``data/scale_images.ini`` next to this file.
    """

    # Create fake images with different modes
    data = np.random.randint(0, 255, (100, 100), dtype=np.uint8)
    adata = 255*np.ones((100, 100), dtype=np.uint8)
    adata[:10, :10] = 0
    img_l = Image.fromarray(data, mode='L')
    img_la = Image.fromarray(np.dstack((data, adata)), mode='LA')
    img_rgb = Image.fromarray(np.dstack((data, data, data)), mode='RGB')
    img_rgba = Image.fromarray(np.dstack((data, data, data, adata)),
                               mode='RGBA')

    # Set PPP_CONFIG_DIR
    os.environ['PPP_CONFIG_DIR'] = os.path.join(os.path.dirname(__file__),
                                                'data')
    # Read config
    config = RawConfigParser()
    config.read(os.path.join(os.path.dirname(__file__),
                             'data', 'scale_images.ini'))

    def test_get_crange(self):
        """Color ranges default to (0, 1) per channel for 1-3 channels."""
        def test_vals(res):
            for val in res:
                self.assertEqual(val[0], 0)
                self.assertEqual(val[1], 1)

        # Test that the color ranges have the correct values and counts
        res = sca._get_crange(1)
        test_vals(res)
        self.assertTrue(len(res) == 1)

        # NOTE(review): the length of the 2-channel result is not asserted
        res = sca._get_crange(2)
        test_vals(res)

        res = sca._get_crange(3)
        self.assertTrue(len(res) == 3)
        test_vals(res)

    def test_pil_to_xrimage(self):
        """Alpha channels are dropped iff a fill_value is given."""
        res = sca._pil_to_xrimage(self.img_l.copy(), None,
                                  fill_value=0)
        self.assertEqual(res.mode, 'L')
        res = sca._pil_to_xrimage(self.img_l.copy(), None,
                                  fill_value=None)
        self.assertEqual(res.mode, 'L')
        res = sca._pil_to_xrimage(self.img_la.copy(), None,
                                  fill_value=42)
        self.assertEqual(res.mode, 'L')
        res = sca._pil_to_xrimage(self.img_la.copy(), None,
                                  fill_value=None)
        self.assertEqual(res.mode, 'LA')
        res = sca._pil_to_xrimage(self.img_rgb.copy(), None,
                                  fill_value=42)
        self.assertEqual(res.mode, 'RGB')
        res = sca._pil_to_xrimage(self.img_rgb.copy(), None,
                                  fill_value=None)
        self.assertEqual(res.mode, 'RGB')
        res = sca._pil_to_xrimage(self.img_rgba.copy(), None,
                                  fill_value=42)
        self.assertEqual(res.mode, 'RGB')
        res = sca._pil_to_xrimage(self.img_rgba.copy(), None,
                                  fill_value=None)
        self.assertEqual(res.mode, 'RGBA')

    @pytest.mark.xfail
    def test_save_image(self):
        """Saving to PNG and TIFF does not raise (marked xfail)."""
        out_dir = tempfile.gettempdir()
        fname = os.path.join(out_dir, 'img.png')
        sca.save_image(self.img_rgba.copy(), fname)
        fname = os.path.join(out_dir, 'img.tif')
        sca.save_image(self.img_rgba.copy(), fname)

    def test_crop_image(self):
        """Cropping clips to the image and wraps past the right edge."""
        res = sca.crop_image(self.img_rgb.copy(), (3, 3, 7, 7))
        self.assertEqual(res.size[0], 4)
        self.assertEqual(res.size[1], 4)
        # Test wrapping, ie. append data from left edge of the image to the
        # right edge
        res = sca.crop_image(self.img_rgb.copy(), (-3, -3, 120, 700))
        self.assertEqual(res.size[0], 120)
        self.assertEqual(res.size[1], 100)
        # All the pixels of the left edge of the "extended" area need to
        # match the left edge of the originating image
        for i in range(self.img_rgb.size[1]):
            self.assertEqual(self.img_rgb.getpixel((0, i)),
                             res.getpixel((100, i)))

    def test_resize_image(self):
        """Resizing works both down and up."""
        res = sca.resize_image(self.img_rgb.copy(), (30, 30))
        self.assertEqual(res.size[0], 30)
        self.assertEqual(res.size[1], 30)
        res = sca.resize_image(self.img_rgb.copy(), (300, 300))
        self.assertEqual(res.size[0], 300)
        self.assertEqual(res.size[1], 300)

    def test_get_text_settings(self):
        """Missing config sections yield defaults; present ones are parsed."""
        # No text settings in config, should give default values
        res = sca._get_text_settings(self.config, '/empty/text/settings')
        self.assertTrue(res['loc'] ==
                        sca.DEFAULT_TEXT_SETTINGS['text_location'])
        self.assertTrue(res['font_fname'] is None)
        self.assertEqual(res['font_size'],
                         int(sca.DEFAULT_TEXT_SETTINGS['font_size']))
        text_color = [int(val) for val in
                      sca.DEFAULT_TEXT_SETTINGS['text_color'].split(',')]
        text_bg_color = \
            [int(val) for val in
             sca.DEFAULT_TEXT_SETTINGS['text_bg_color'].split(',')]
        for i in range(3):
            self.assertEqual(res['text_color'][i], text_color[i])
            self.assertEqual(res['bg_color'][i], text_bg_color[i])

        # Settings are given
        res = sca._get_text_settings(self.config, '/text/settings')
        self.assertEqual(res['x_marginal'], 20)
        self.assertEqual(res['y_marginal'], 5)
        self.assertEqual(res['bg_extra_width'], 5)

    def test_get_font(self):
        """An unknown font path falls back to PIL's default bitmap font."""
        res = sca._get_font('non_existent', 12)
        self.assertTrue(isinstance(res, ImageFont.ImageFont))
        res = sca._get_font(os.path.join(os.path.dirname(__file__),
                                         'data', 'DejaVuSerif.ttf'), 12)
        self.assertTrue(isinstance(res, ImageFont.FreeTypeFont))

    def test_add_text(self):
        """Adding text preserves mode unless a colored background forces RGB."""
        text_settings = sca._get_text_settings(self.config, '/text/settings')
        # Replace placeholder font path with one that certainly exists
        text_settings['font_fname'] = os.path.join(os.path.dirname(__file__),
                                                   'data', 'DejaVuSerif.ttf')
        # Default text settings (black on white)
        res = sca.add_text(self.img_l.copy(), 'PL', text_settings)
        self.assertTrue(res.mode == 'L')
        res = sca.add_text(self.img_la.copy(), 'PL', text_settings)
        self.assertTrue(res.mode == 'LA')
        res = sca.add_text(self.img_rgb.copy(), 'PL', text_settings)
        self.assertTrue(res.mode == 'RGB')
        res = sca.add_text(self.img_rgba.copy(), 'PL', text_settings)
        self.assertTrue(res.mode == 'RGBA')

        # Black on blue
        text_settings['bg_color'] = (200, 200, 255)
        res = sca.add_text(self.img_l.copy(), 'PL', text_settings)
        self.assertTrue(res.mode == 'RGB')
        res = sca.add_text(self.img_la.copy(), 'PL', text_settings)
        self.assertTrue(res.mode == 'RGBA')
        res = sca.add_text(self.img_rgb.copy(), 'PL', text_settings)
        self.assertTrue(res.mode == 'RGB')
        res = sca.add_text(self.img_rgba.copy(), 'PL', text_settings)
        self.assertTrue(res.mode == 'RGBA')

    def test_is_rgb_color(self):
        """A color set is 'RGB' when any tuple has unequal channel values."""
        res = sca._is_rgb_color(((0, 0, 0), ))
        self.assertFalse(res)
        res = sca._is_rgb_color(((1, 0, 0), ))
        self.assertTrue(res)
        res = sca._is_rgb_color(((0, 0, 0), (1, 0, 0), ))
        self.assertTrue(res)

    def test_get_text_and_box_locations(self):
        """Text/box placement for SW, NE and SC (centered) anchors."""
        shape = self.img_rgb.size
        textsize = (18, 11)
        marginals = (10, 3)
        bg_extra_width = 4

        text_loc, box_loc = \
            sca._get_text_and_box_locations(shape, 'SW',
                                            textsize, marginals,
                                            bg_extra_width)

        # Test only relevant things: x and y corners and that box is
        # wider than text
        self.assertEqual(text_loc[0], 10)
        self.assertEqual(text_loc[1],
                         shape[1] - textsize[1] - 2 * marginals[1])
        self.assertLessEqual(box_loc[0], text_loc[0])
        self.assertEqual(box_loc[1], text_loc[1])
        self.assertGreaterEqual(box_loc[2], text_loc[0] + textsize[0])
        self.assertEqual(box_loc[3], shape[1])

        text_loc, box_loc = \
            sca._get_text_and_box_locations(shape, 'NE',
                                            textsize, marginals,
                                            bg_extra_width)

        # Test only relevant things: x and y corners and that box is
        # wider than text
        self.assertEqual(text_loc[0],
                         shape[0] - textsize[0] - marginals[0])
        self.assertEqual(text_loc[1], 0)
        self.assertLessEqual(box_loc[0], text_loc[0])
        self.assertEqual(box_loc[1], text_loc[1])
        self.assertGreaterEqual(box_loc[2], text_loc[0] + textsize[0])
        self.assertGreaterEqual(box_loc[3], textsize[1] - 1)

        text_loc, box_loc = \
            sca._get_text_and_box_locations(shape, 'SC',
                                            textsize, marginals,
                                            bg_extra_width)

        # Test only centering
        self.assertEqual(text_loc[0], (shape[0] - textsize[0]) / 2)
        self.assertLessEqual(box_loc[0], text_loc[0])
        self.assertGreaterEqual(box_loc[2], text_loc[0] + textsize[0])

    def test_adjust_img_mode_for_text(self):
        """Image mode is promoted to RGB(A) only for RGB text colors."""
        res = sca._adjust_img_mode_for_text(self.img_l, ((0, 0, 0), ))
        self.assertTrue(res.mode == 'L')
        res = sca._adjust_img_mode_for_text(self.img_l, ((1, 0, 0), ))
        self.assertTrue(res.mode == 'RGB')
        res = sca._adjust_img_mode_for_text(self.img_la, ((1, 0, 0), ))
        self.assertTrue(res.mode == 'RGBA')
        res = sca._adjust_img_mode_for_text(self.img_rgb, ((1, 0, 0), ))
        self.assertTrue(res.mode == 'RGB')
        res = sca._adjust_img_mode_for_text(self.img_rgba, ((1, 0, 0), ))
        self.assertTrue(res.mode == 'RGBA')

    def test_read_image(self):
        """A PNG round-trip (save then read) preserves pixel data exactly."""
        out_dir = tempfile.gettempdir()
        fname = os.path.join(out_dir, 'img.png')
        sca.save_image(self.img_rgba.copy(), fname)
        res = sca.read_image(fname)
        res = np.array(res.getdata(), dtype=np.float32)
        src = np.array(self.img_rgba.getdata(), dtype=np.float32)
        self.assertEqual(np.max(res - src), 0)

    def test_update_existing_image(self):
        """Updating respects alpha masking and adopts the new image's mode."""
        out_dir = tempfile.gettempdir()
        fname = os.path.join(out_dir, 'img.png')
        sca.save_image(self.img_rgba.copy(), fname)
        data = 255 * np.ones(self.data.shape, dtype=np.uint8)
        # Replace part of the alpha channel with zeros, so that not all of
        # the image is updated
        data[0, :] *= 0
        data_stack = np.dstack((data, data, data, data))
        new_img = Image.fromarray(data_stack, mode='RGBA')
        res = sca.update_existing_image(fname, new_img)
        res = np.array(res)
        self.assertTrue(np.all(res[1:, :, :] == 255))
        self.assertTrue(np.all(res[0, :, :-1] ==
                               np.array(self.img_rgba)[0, :, :-1]))

        # Update L with L
        sca.save_image(self.img_l.copy(), fname)
        res = sca.update_existing_image(fname, self.img_l.copy())
        self.assertTrue(res.mode == 'L')
        # Update L with LA
        res = sca.update_existing_image(fname, self.img_la.copy())
        self.assertTrue(res.mode == 'LA')
        # Update L with RGB
        res = sca.update_existing_image(fname, self.img_rgb.copy())
        self.assertTrue(res.mode == 'RGB')
        # Update L with RGBA
        res = sca.update_existing_image(fname, self.img_rgba.copy())
        self.assertTrue(res.mode == 'RGBA')

        # Update LA with L
        sca.save_image(self.img_la.copy(), fname)
        res = sca.update_existing_image(fname, self.img_l.copy())
        self.assertTrue(res.mode == 'L')
        # Update LA with LA
        res = sca.update_existing_image(fname, self.img_la.copy())
        self.assertTrue(res.mode == 'LA')
        # Update LA with RGB
        res = sca.update_existing_image(fname, self.img_rgb.copy())
        self.assertTrue(res.mode == 'RGB')
        # Update LA with RGBA
        res = sca.update_existing_image(fname, self.img_rgba.copy())
        self.assertTrue(res.mode == 'RGBA')

        # Update RGB with L
        sca.save_image(self.img_rgb.copy(), fname)
        res = sca.update_existing_image(fname, self.img_l.copy())
        self.assertTrue(res.mode == 'L')
        # Update RGB with LA
        res = sca.update_existing_image(fname, self.img_la.copy())
        self.assertTrue(res.mode == 'LA')
        # Update RGB with RGB
        res = sca.update_existing_image(fname, self.img_rgb.copy())
        self.assertTrue(res.mode == 'RGB')
        # Update RGB with RGBA
        res = sca.update_existing_image(fname, self.img_rgba.copy())
        self.assertTrue(res.mode == 'RGBA')

        # Update RGBA with L
        sca.save_image(self.img_rgba.copy(), fname)
        res = sca.update_existing_image(fname, self.img_l.copy())
        self.assertTrue(res.mode == 'L')
        # Update RGBA with LA
        res = sca.update_existing_image(fname, self.img_la.copy())
        self.assertTrue(res.mode == 'LA')
        # Update RGBA with RGB
        res = sca.update_existing_image(fname, self.img_rgb.copy())
        self.assertTrue(res.mode == 'RGB')
        # Update RGBA with RGBA
        res = sca.update_existing_image(fname, self.img_rgba.copy())
        self.assertTrue(res.mode == 'RGBA')

    def test_add_image_as_overlay(self):
        """Overlaying an RGBA image works on any base mode."""
        res = sca.add_image_as_overlay(self.img_l.copy(), self.img_rgba)
        res = sca.add_image_as_overlay(self.img_la.copy(), self.img_rgba)
        res = sca.add_image_as_overlay(self.img_rgb.copy(), self.img_rgba)
        res = sca.add_image_as_overlay(self.img_rgba.copy(), self.img_rgba)
        data = self.data.copy()
        data[:, 10:20] = 255
        overlay = Image.fromarray(np.dstack((data, data, data, data)),
                                  mode='RGBA')
        res = sca.add_image_as_overlay(self.img_rgb.copy(), overlay)
        self.assertEqual(res.getdata(0)[10], 255)

    @patch("pytroll_collectors.image_scaler.ListenerContainer")
    @patch("pytroll_collectors.image_scaler.ContourWriter")
    def test_ImageScaler(self, cwriter, listener):
        """End-to-end exercise of ImageScaler config parsing and helpers.

        The listener and contour writer are mocked so no messaging or
        coastline drawing actually happens.
        """
        scaler = sca.ImageScaler(self.config)
        scaler.subject = '/scaler'
        filename = '201702071200_Meteosat-10_EPSG4326_spam.png'
        filename = os.path.join(os.path.dirname(__file__),
                                'data', filename)

        res = scaler._get_conf_with_default('areaname')
        self.assertTrue(res == self.config.get('/scaler',
                                               'areaname'))

        res = scaler._get_bool('only_backup')
        self.assertTrue(res == sca.DEFAULT_CONFIG_VALUES['only_backup'])
        res = scaler._get_bool('out_dir')
        self.assertFalse(res)

        scaler._get_text_settings()
        self.assertTrue(
            scaler.text_pattern == sca.DEFAULT_CONFIG_VALUES['text_pattern'])
        self.assertTrue(isinstance(scaler.text_settings, dict))

        # Sections missing mandatory items (or missing entirely) must raise
        scaler.subject = '/empty/text/settings'
        with self.assertRaises(KeyError):
            scaler._get_mandatory_config_items()
        scaler.subject = '/not/existing'
        with self.assertRaises(KeyError):
            scaler._get_mandatory_config_items()
        scaler.subject = '/scaler'

        scaler._get_mandatory_config_items()
        self.assertTrue(scaler.areaname == self.config.get('/scaler',
                                                           'areaname'))
        self.assertTrue(scaler.in_pattern == self.config.get('/scaler',
                                                             'in_pattern'))
        self.assertTrue(scaler.out_pattern == self.config.get('/scaler',
                                                              'out_pattern'))

        scaler.fileparts.update(parse(scaler.out_pattern,
                                      os.path.basename(filename)))
        scaler._tidy_platform_name()
        self.assertTrue(scaler.fileparts['platform_name'] == "Meteosat10")

        scaler._update_current_config()
        # Test a few config items that have the default values
        self.assertEqual(scaler.timeliness,
                         sca.DEFAULT_CONFIG_VALUES['timeliness'])
        self.assertEqual(len(scaler.tags),
                         len(sca.DEFAULT_CONFIG_VALUES['tags']))
        # And the config values
        self.assertTrue(scaler.areaname == self.config.get('/scaler',
                                                           'areaname'))
        self.assertTrue(scaler.in_pattern == self.config.get('/scaler',
                                                             'in_pattern'))
        self.assertTrue(scaler.out_pattern == self.config.get('/scaler',
                                                              'out_pattern'))

        scaler._parse_crops()
        self.assertEqual(len(scaler.crops), 0)
        scaler._parse_sizes()
        self.assertEqual(len(scaler.sizes), 0)
        scaler._parse_tags()
        self.assertEqual(len(scaler.tags), 0)

        scaler.subject = '/crops/sizes/tags'
        scaler._update_current_config()
        scaler._parse_crops()
        self.assertEqual(len(scaler.crops), 2)
        self.assertEqual(len(scaler.crops[0]), 4)
        self.assertTrue(scaler.crops[1] is None)

        scaler._parse_sizes()
        self.assertEqual(len(scaler.sizes), 3)
        self.assertEqual(len(scaler.sizes[0]), 2)

        scaler._parse_tags()
        self.assertEqual(len(scaler.tags), 3)

        # Default text settings (black on white)
        res = scaler._add_text(self.img_l.copy(), 'PL')
        self.assertTrue(res.mode == 'L')
        res = scaler._add_text(self.img_la.copy(), 'PL')
        self.assertTrue(res.mode == 'LA')
        res = scaler._add_text(self.img_rgb.copy(), 'PL')
        self.assertTrue(res.mode == 'RGB')
        res = scaler._add_text(self.img_rgba.copy(), 'PL')
        self.assertTrue(res.mode == 'RGBA')

        scaler.fileparts.update(parse(scaler.out_pattern,
                                      os.path.basename(filename)))
        tslot = dt.datetime.utcnow()
        # File that doesn't exist
        res = scaler._check_existing(tslot)
        self.assertEqual(len(res), 0)
        # Existing file with "is_backup" set to False so we should get a full
        # set of metadata
        scaler.out_dir = os.path.join(os.path.dirname(__file__),
                                      'data')
        tslot = scaler.fileparts['time']
        res = scaler._check_existing(tslot)
        self.assertEqual(res['time'], tslot)
        self.assertEqual(res['areaname'], scaler.areaname)
        self.assertEqual(res['platform_name'],
                         scaler.fileparts['platform_name'])
        self.assertEqual(res['composite'], 'spam')
        # Existing file with "is_backup" set to True
        scaler.is_backup = True
        res = scaler._check_existing(tslot)
        self.assertIsNone(res)
示例#13
0
def dump_config(f=None):
    """Write the current logging configuration in ``fileConfig`` INI format.

    Walks the root logger and every logger registered in
    ``logging.root.manager.loggerDict``, collecting their handlers and
    formatters, and writes the result as the ``[loggers]`` / ``[handlers]``
    / ``[formatters]`` sections understood by ``logging.config.fileConfig``.

    Args:
        f: Destination for the dump. A string is treated as a file path
           (opened for writing and closed on return); ``None`` means
           ``sys.stdout``; anything else is used as an open file object
           (left open for the caller).
    """
    fclose = lambda: None
    if isinstance(f, six.string_types):
        f = open(f, 'w')
        fclose = f.close
    if f is None:
        f = sys.stdout
    try:
        # Maps id(object) -> info dict, so shared handlers/formatters are
        # emitted once and referenced by section name.
        handlers = {}
        formatters = {}
        loggers = {}

        def process_formatter(formatter):
            """Register *formatter* and return its config-info dict."""
            if formatter is None:
                return dict(sectname=None)
            if id(formatter) in formatters:
                return formatters[id(formatter)]
            klass = formatter.__class__
            fmtinfo = {
                'format': formatter._fmt,
                'datefmt': formatter.datefmt or ''
            }
            if klass is not logging.Formatter:
                fmtinfo['class'] = klass.__module__ + '.' + klass.__name__
            # Record the format style ('{' or '$') when it is not the
            # default '%' style; logging._STYLES is absent on Python 2.
            style = getattr(formatter, '_style', None)
            if style:
                for k, (tp, df) in getattr(logging, '_STYLES', {}).items():
                    if isinstance(style, tp):
                        if k != '%':
                            fmtinfo['style'] = k
                        break
            formatters[id(formatter)] = fmtinfo
            fmtinfo['sectname'] = 'form%d' % (len(formatters), )
            return fmtinfo

        def process_stream(stream):
            """Return a config representation of a StreamHandler's stream."""
            if stream is None:
                return None
            if stream is sys.stdout:
                return LiteralExpr('sys.stdout')
            if stream is sys.stderr:
                return LiteralExpr('sys.stderr')
            return stream.name

        def process_handler(handler):
            """Register *handler* and return its config-info dict."""
            if handler is None:
                return dict(sectname=None)
            if id(handler) in handlers:
                return handlers[id(handler)]
            klass = handler.__class__
            # Check whether the handler pickles the standard way; if not,
            # fall back to its instance __dict__.
            try:
                reduced_obj = handler.__reduce_ex__(2)
                assert reduced_obj[0] is copy_reg.__newobj__
                assert isinstance(reduced_obj[1], tuple)
                assert len(reduced_obj[1]) == 1
                assert reduced_obj[1][0] is klass
            except Exception:
                reduced_obj = None
            # NOTE(review): this state dict is collected but never consumed
            # below; the original code even spelled it two different ways
            # (hdlrdict / hndldict). Unified under one name for clarity.
            if reduced_obj is None:
                hndlstate = handler.__dict__.copy()
            else:
                hndlstate = reduced_obj[2].copy()
            if klass is getattr(logging, '_StderrHandler', None):
                hndlinfo = {'class': '_StderrHandler', 'args': '()'}
            elif klass is logging.FileHandler:
                # Emit only the constructor arguments that differ from the
                # defaults, keeping the generated config minimal.
                if handler.delay is not False:
                    args = os.path.relpath(
                        handler.baseFilename
                    ), handler.mode, handler.encoding, handler.delay
                elif handler.encoding is not None:
                    args = os.path.relpath(
                        handler.baseFilename), handler.mode, handler.encoding
                else:
                    args = os.path.relpath(handler.baseFilename), handler.mode
                hndlinfo = {
                    'class': 'FileHandler',
                    'args': repr(args),
                    'filename': args[0],
                    'mode': args[1]
                }
            elif klass is logging.StreamHandler:
                args = process_stream(handler.stream),
                hndlinfo = {'class': 'StreamHandler', 'args': repr(args)}
            elif issubclass(klass, logging.FileHandler):
                # BUGFIX: was isinstance(klass, logging.FileHandler), which
                # is always False for a class object, so FileHandler
                # subclasses (e.g. RotatingFileHandler) fell through and
                # crashed with NameError below.
                args = os.path.relpath(handler.baseFilename), handler.mode
                hndlinfo = {
                    'args': repr(args),
                    'filename': args[0],
                    'mode': args[1]
                }
            else:
                assert klass.__module__ == 'logging', "Unknown class of handler " + repr(
                    handler)
                # BUGFIX: the original left hndlinfo undefined on this path
                # (NameError for e.g. NullHandler); start empty and let the
                # generic fill-in below supply the class name.
                hndlinfo = {}
            if 'class' not in hndlinfo:
                hndlinfo['class'] = klass.__module__ + '.' + klass.__name__
            if getattr(handler, 'formatter', None):
                hndlinfo['formatter'] = process_formatter(
                    handler.formatter)['sectname']
            hndlinfo['level'] = logging.getLevelName(handler.level)
            if issubclass(klass,
                          logging.handlers.MemoryHandler) and handler.target:
                # NOTE(review): this stores the whole info dict; fileConfig
                # expects a section name — possibly should be ['sectname'].
                hndlinfo['target'] = process_handler(handler.target)
            handlers[id(handler)] = hndlinfo
            hndlinfo['sectname'] = handler._name or 'hand%d' % (
                len(handlers), )
            return hndlinfo

        def process_logger(logger):
            """Register *logger* (recursing to its parent) and return its info."""
            if isinstance(logger, logging.PlaceHolder):
                return
            if logger is None:
                # used as "parent" of root logger
                return dict(name='', qualname='')
            if id(logger) in loggers:
                return loggers[id(logger)]
            channel = name = logger.name
            if logger is logger.root:
                channel = name = ''
            parent = process_logger(logger.parent)
            # The channel is the logger name relative to its parent.
            pnamel = len(parent['name'])
            if pnamel and name[:pnamel + 1] == parent['name'] + '.':
                channel = name[pnamel + 1:]
            loginfo = dict(
                name=name,
                channel=channel,
                qualname=name or '(root)',
                sectname=(name or 'root').replace('.', '_'),
                level=logging.getLevelName(logger.level),
                parent=parent['qualname'],
            )
            if not logger.propagate:
                loginfo['propagate'] = '0'
            loginfo['handlers'] = ','.join(
                hndlinfo['sectname']
                for hndlinfo in (process_handler(handler)
                                 for handler in logger.handlers) if hndlinfo)
            loggers[id(logger)] = loginfo
            return loginfo

        process_logger(logging.root)
        for logname in sorted(list(logging.root.manager.loggerDict.keys())):
            logger = logging.root.manager.loggerDict[logname]
            process_logger(logger)
        assert id(logging.root) in loggers
        assert all('sectname' in loginfo for loginfo in loggers.values())
        assert loggers[id(logging.root)]['sectname'] == 'root'
        # Assemble the INI structure: the index sections first, then one
        # section per logger, handler and formatter.
        conf = RawConfigParser()
        conf.add_section('loggers')
        conf.set('loggers', 'keys',
                 ','.join(loginfo['sectname'] for loginfo in loggers.values()))
        if handlers:
            conf.add_section('handlers')
            conf.set(
                'handlers', 'keys',
                ','.join(hndlinfo['sectname']
                         for hndlinfo in handlers.values()))
        if formatters:
            conf.add_section('formatters')
            conf.set(
                'formatters', 'keys',
                ','.join(fmtinfo['sectname']
                         for fmtinfo in formatters.values()))
        for loginfo in loggers.values():
            sectname = 'logger_' + loginfo.pop('sectname')
            loginfo.pop('name')
            conf.add_section(sectname)
            for k, v in loginfo.items():
                conf.set(sectname, k, v)
        for hdlrinfo in handlers.values():
            sectname = 'handler_' + hdlrinfo.pop('sectname')
            conf.add_section(sectname)
            for k, v in hdlrinfo.items():
                conf.set(sectname, k, v)
        for fmtinfo in formatters.values():
            sectname = 'formatter_' + fmtinfo.pop('sectname')
            conf.add_section(sectname)
            for k, v in fmtinfo.items():
                conf.set(sectname, k, v)
        # Python 3.4+ supports suppressing the space around delimiters.
        if sys.version_info[0:2] >= (3, 4):
            conf.write(f, False)
        else:
            conf.write(f)
    finally:
        fclose()
示例#14
0
    def handle(self, *args, **options):
        """
        Command core.

        Generates a settings file from a template: prompts the user for the
        values referenced in the template, optionally generates the Django
        secret key, encrypts the fields listed in ``self.encrypted_field``
        and writes the resulting configuration to ``settings_file_path``.

        Raises:
            CommandError: on missing parameters, missing template file,
                user cancellation, or repeated encryption failure.
        """
        settings_template_file = options["settings_template_file"]
        settings_file_path = options["settings_file_path"]
        force_secret_key = options["force_secretkey"]
        if not force_secret_key:
            force_secret_key = self.default_force_secret_key
        if not settings_template_file:
            raise CommandError(
                "Parameter settings_template_file undefined.\nUsage: %s" % self.usage
            )
        if not settings_file_path:
            raise CommandError(
                "Parameter settings_file_path undefined.\nUsage: %s" % self.usage
            )
        if not os.path.exists(settings_template_file):
            raise CommandError("The settings template file doesn't exists.")

        self.stdout.write("** Configuration file generation: **")
        if os.path.exists(settings_file_path):
            override = get_input(
                "A configuration file already exists at %s. "
                "Would you override it ? (y/N) : " % settings_file_path
            )
            if override.upper() != "Y":
                raise CommandError("Generation cancelled.")

        config = RawConfigParser()
        config.read(settings_template_file)

        input_secret_key = False
        secret_key = None
        if not force_secret_key:
            generate_secret_key = get_input(
                "Do you want to generate the secret key for Django ? (Y/n) : "
            )
            input_secret_key = generate_secret_key.upper() == "N"
        if input_secret_key:
            secret_key = get_input("Enter your secret key : ")
            if not secret_key:
                raise CommandError(
                    "Django secret key is needed for encryption. Generation cancelled."
                )
        else:
            self.stdout.write("Django secret key generation !")

        self.stdout.write("\n** Filling values for configuration file content **")
        # Template values of the form " {TYPE} " mark properties that must
        # be asked from the user; TYPE drives the prompt behaviour.
        variable_regex = re.compile(r" *{(.+)} *")
        properties = {}
        for section in config.sections():
            properties[section] = {}
            for key, value in config.items(section):
                match_groups = variable_regex.match(value)
                if match_groups:
                    value_type = match_groups.group(1).strip().upper()
                    value = self.get_value(section, key, value_type)
                    properties[section][key] = value
        max_retry = 0 if input_secret_key else 3
        retry = 0
        # BUGFIX: the original used properties.copy() (a shallow copy), so
        # the inner section dicts were shared with `properties`; a failed
        # attempt then re-encrypted already-encrypted values on retry.
        # Build independent per-section copies instead.
        encrypted_properties = {
            section: dict(values) for section, values in properties.items()
        }
        while retry <= max_retry and self.encrypted_field:
            # Restart each attempt from the pristine plaintext values.
            encrypted_properties = {
                section: dict(values) for section, values in properties.items()
            }
            if secret_key is None:
                secret_key = get_random_secret_key().replace("%", "0")
            try:
                for section, key in self.encrypted_field:
                    value = encryption.encrypt(properties[section][key], secret_key)
                    # Round-trip to verify the key actually works.
                    encryption.decrypt(value, secret_key)
                    encrypted_properties[section][key] = value
                retry = max_retry
            except ValueError:
                # Bad key: discard it so a fresh one is generated next turn.
                secret_key = None
            retry += 1

        if secret_key is None:
            raise CommandError(
                "Error while encoding / decoding passwords with the secret key."
                "Retried %s. Generation cancelled." % max_retry
            )

        # Store the secret key itself at the configured locations.
        for section, key in self.django_keys:
            encrypted_properties[section][key] = secret_key

        # Fill config file
        for section, values in encrypted_properties.items():
            for key, value in values.items():
                config.set(section, key, value)
        self.stdout.write("\nWriting file at %s:" % settings_file_path)
        settings_directory = os.path.dirname(settings_file_path)
        if not os.path.exists(settings_directory):
            os.makedirs(settings_directory)
        with open(settings_file_path, "w") as config_file:
            config.write(config_file)
        self.stdout.write(
            self.style.SUCCESS("Configuration file successfully generated !")
        )