Code example #1
File: config.py Project: LinguList/lingpy
    def __init__(self, name, default=None, **kw):
        """Initialization.

        :param name: Basename for the config file (suffix .ini will be appended).
        :param default: Default content of the config file.
        """
        self.name = name
        self.default = default
        config_dir = Path(kw.pop('config_dir', None) or DIR)
        RawConfigParser.__init__(self, kw, allow_no_value=True)
        if self.default:
            if PY3:
                fp = io.StringIO(self.default)
            else:
                fp = io.BytesIO(self.default.encode('utf8'))
            self.readfp(fp)

        cfg_path = config_dir.joinpath(name + '.ini')
        if cfg_path.exists():
            assert cfg_path.is_file()
            self.read(cfg_path.as_posix())
        else:
            if not config_dir.exists():
                try:
                    config_dir.mkdir()
                except OSError:  # pragma: no cover
                    # this happens when run on travis-ci, by a system user.
                    pass
            if config_dir.exists():
                with open(cfg_path.as_posix(), 'w') as fp:
                    self.write(fp)
        self.path = cfg_path
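
A hypothetical usage sketch for the snippet above (it assumes this __init__ belongs to a RawConfigParser subclass named Config in lingpy's config module, and that DIR and PY3 come from that module; the section name, content and paths are made up):

    cfg = Config('lingpy', default='[data]\npath = ~/lingpy-data\n',
                 config_dir='/tmp/lingpy-config')
    print(cfg.get('data', 'path'))   # '~/lingpy-data', taken from the default content
    print(cfg.path)                  # Path('/tmp/lingpy-config/lingpy.ini'), written on first run
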
Code example #2
File: auth.py Project: bigblueswope/python-projects
class CredentialStore(object):
    def __init__(self, **kwargs):
        self.credential_search_path = [
            os.path.join(os.path.sep, "etc", "cbdefense", "credentials"),
            os.path.join(os.path.expanduser("~"), ".cbdefense", "credentials"),
            os.path.join(".", ".cbdefense", "credentials"),
        ]

        if "credential_file" in kwargs:
            if isinstance(kwargs["credential_file"], six.string_types):
                self.credential_search_path = [kwargs["credential_file"]]
            elif type(kwargs["credential_file"]) is list:
                self.credential_search_path = kwargs["credential_file"]

        self.credentials = RawConfigParser(defaults=default_profile)
        self.credential_files = self.credentials.read(self.credential_search_path)

    def get_credentials(self, profile=None):
        credential_profile = profile or "default"
        if credential_profile not in self.get_profiles():
            raise CredentialError("Cannot find credential profile '%s' after searching in these files: %s." %
                                  (credential_profile, ", ".join(self.credential_search_path)))

        retval = {}
        for k, v in six.iteritems(default_profile):
            retval[k] = self.credentials.get(credential_profile, k)

        if not retval["api_key"] or not retval["conn_id"] or not retval["cbd_api_url"]:
            raise CredentialError("API Key and Connector ID not available for profile %s" % credential_profile)

        return Credentials(retval)

    def get_profiles(self):
        return self.credentials.sections()
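
A hedged usage sketch for CredentialStore (default_profile, CredentialError and Credentials are defined elsewhere in the module; the file contents and paths below are illustrative only):

    # /etc/cbdefense/credentials or ~/.cbdefense/credentials, e.g.:
    # [default]
    # api_key = ABCD1234
    # conn_id = XYZ789
    # cbd_api_url = https://api.example.com

    store = CredentialStore(credential_file="/path/to/credentials")
    creds = store.get_credentials("default")   # raises CredentialError if the profile is missing
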
Code example #3
File: config.py Project: sflavier/lingpy
    def __init__(self, name, default=None, **kw):
        """Initialization.

        :param name: Basename for the config file (suffix .ini will be appended).
        :param default: Default content of the config file.
        """
        self.name = name
        self.default = default
        config_dir = kw.pop("config_dir", None) or text_type(DIR)
        RawConfigParser.__init__(self, kw, allow_no_value=True)
        if self.default:
            if PY3:
                fp = io.StringIO(self.default)
            else:
                fp = io.BytesIO(self.default.encode("utf8"))
            self.readfp(fp)

        cfg_path = os.path.join(config_dir, name + ".ini")
        if os.path.exists(cfg_path):
            assert os.path.isfile(cfg_path)
            self.read(cfg_path)
        else:
            if not os.path.exists(config_dir):
                try:
                    os.mkdir(config_dir)
                except OSError:  # pragma: no cover
                    # this happens when run on travis-ci, by a system user.
                    pass
            if os.path.exists(config_dir):
                with open(cfg_path, "w") as fp:
                    self.write(fp)
        self.path = Path(cfg_path)
Code example #4
File: config.py Project: ioverka/pycdstar
    def __init__(self, **kw):
        cfg_path = kw.pop('cfg', None) \
            or os.path.join(APP_DIRS.user_config_dir, 'config.ini')
        cfg_path = os.path.abspath(cfg_path)

        RawConfigParser.__init__(self)

        if os.path.exists(cfg_path):
            assert os.path.isfile(cfg_path)
            self.read(cfg_path)
        else:
            self.add_section('service')
            for opt in 'url user password'.split():
                self.set('service', opt, kw.get(opt, '') or '')
            self.add_section('logging')
            self.set('logging', 'level', 'INFO')

            config_dir = os.path.dirname(cfg_path)
            if not os.path.exists(config_dir):
                try:
                    os.makedirs(config_dir)
                except OSError:  # pragma: no cover
                    # this happens when run on travis-ci, by a system user.
                    pass
            if os.path.exists(config_dir):
                with open(cfg_path, 'w') as fp:
                    self.write(fp)
        level = self.get('logging', 'level', default=None)
        if level:
            logging.basicConfig(level=getattr(logging, level))
Code example #5
File: azureJobStore.py Project: chapmanb/toil
def _fetchAzureAccountKey(accountName):
    """
    Find the account key for a given Azure storage account.

    The account key is taken from the AZURE_ACCOUNT_KEY_<account> environment variable if it
    exists, then from plain AZURE_ACCOUNT_KEY, and then from looking in the file
    ~/.toilAzureCredentials. That file has format:

    [AzureStorageCredentials]
    accountName1=ACCOUNTKEY1==
    accountName2=ACCOUNTKEY2==
    """
    try:
        return os.environ['AZURE_ACCOUNT_KEY_' + accountName]
    except KeyError:
        try:
            return os.environ['AZURE_ACCOUNT_KEY']
        except KeyError:
            configParser = RawConfigParser()
            configParser.read(os.path.expanduser(credential_file_path))
            try:
                return configParser.get('AzureStorageCredentials', accountName)
            except NoOptionError:
                raise RuntimeError("No account key found for '%s', please provide it in '%s'" %
                                   (accountName, credential_file_path))
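
A small sketch of the lookup order described in the docstring (credential_file_path is a module-level constant pointing at ~/.toilAzureCredentials; the account name and key value are made up):

    import os

    os.environ['AZURE_ACCOUNT_KEY_mystorage'] = 'FAKEKEY=='
    assert _fetchAzureAccountKey('mystorage') == 'FAKEKEY=='   # per-account env var wins
    del os.environ['AZURE_ACCOUNT_KEY_mystorage']
    # With neither environment variable set, the function falls back to the
    # [AzureStorageCredentials] section of ~/.toilAzureCredentials.
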
Code example #6
File: configparser.py Project: Tribler/tribler
 def set(self, section, option, new_value):
     with self.lock:
         if self.callback and self.has_section(section) and self.has_option(section, option):
             old_value = self.get(section, option)
             if not self.callback(section, option, new_value, old_value):
                 raise OperationNotPossibleAtRuntimeException
         RawConfigParser.set(self, section, option, new_value)
Code example #7
 def test_read_test_tribler_conf(self):
     """
     Test upgrading a Tribler configuration from 7.0 to 7.1
     """
     old_config = RawConfigParser()
     old_config.read(os.path.join(self.CONFIG_PATH, "tribler70.conf"))
     new_config = TriblerConfig()
     result_config = add_tribler_config(new_config, old_config)
     self.assertEqual(result_config.get_default_safeseeding_enabled(), True)
Code example #8
File: vcs.py Project: franco999/weblate
 def get_config(self, path):
     """
     Reads entry from configuration.
     """
     section, option = path.split('.', 1)
     filename = os.path.join(self.path, '.hg', 'hgrc')
     config = RawConfigParser()
     config.read(filename)
     if config.has_option(section, option):
         return config.get(section, option).decode('utf-8')
     return None
Code example #9
 def test_read_test_libtribler_conf(self):
     """
     Test upgrading a libtribler configuration from 7.0 to 7.1
     """
     os.environ['TSTATEDIR'] = self.session_base_dir
     old_config = RawConfigParser()
     old_config.read(os.path.join(self.CONFIG_PATH, "libtribler70.conf"))
     new_config = TriblerConfig()
     result_config = add_libtribler_config(new_config, old_config)
     self.assertEqual(result_config.get_tunnel_community_socks5_listen_ports(), [1, 2, 3, 4, 5, 6])
     self.assertEqual(result_config.get_anon_proxy_settings(), (2, ("127.0.0.1", [5, 4, 3, 2, 1]), ''))
     self.assertEqual(result_config.get_credit_mining_sources(), ['source1', 'source2'])
     self.assertEqual(result_config.get_log_dir(), '/a/b/c')
Code example #10
    def test_read_test_corr_tribler_conf(self):
        """
        Adding corrupt values should result in the default value.

        Note that this test might fail if there is already an upgraded config stored in the default
        state directory. The code being tested here, however, shouldn't be run if that config already exists.
        :return:
        """
        old_config = RawConfigParser()
        old_config.read(os.path.join(self.CONFIG_PATH, "triblercorrupt70.conf"))
        new_config = TriblerConfig()
        result_config = add_tribler_config(new_config, old_config)
        self.assertEqual(result_config.get_default_anonymity_enabled(), True)
Code example #11
    def read_configuration(self):
        # Assemble config_file_options
        for section in self.base_config_file_options:
            self.config_file_options[section].update(
                self.base_config_file_options[section])

        # Read config
        cp = RawConfigParser()
        cp.read_file = cp.readfp if six.PY2 else cp.read_file
        try:
            cp.read_file(open(self.args.config_file))
        except MissingSectionHeaderError:
            # No section headers? Assume the [General] section is implied.
            configuration = '[General]\n' + open(self.args.config_file).read()
            cp.read_file(StringIO(configuration))

        # Convert config to dictionary (for Python 2.7 compatibility)
        self.config = {}
        for section in cp.sections():
            self.config[section] = dict(cp.items(section))

        # Set defaults
        for section in self.config:
            section_options_key = section \
                if section in self.config_file_options else 'other'
            section_options = self.config_file_options[section_options_key]
            if section_options == 'nocheck':
                continue
            for option in section_options:
                value = section_options[option]
                if value is not None:
                    self.config[section].setdefault(option, value)

        # Check
        self.check_configuration()
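
The MissingSectionHeaderError fallback above is a reusable trick; a minimal, self-contained Python 3 sketch of the same idea:

    from io import StringIO
    from configparser import RawConfigParser, MissingSectionHeaderError

    def read_with_implied_general(path):
        """Read an INI file, prepending an implied [General] header if it has none."""
        cp = RawConfigParser()
        try:
            with open(path) as fh:
                cp.read_file(fh)
        except MissingSectionHeaderError:
            with open(path) as fh:
                cp.read_file(StringIO('[General]\n' + fh.read()))
        return cp
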
Code example #12
    def test_upgrade_pstate_files(self):
        """
        Test whether the existing pstate files are correctly updated to 7.1.
        """
        os.makedirs(os.path.join(self.state_dir, STATEDIR_DLPSTATE_DIR))

        # Copy an old pstate file
        src_path = os.path.join(self.CONFIG_PATH, "download_pstate_70.state")
        shutil.copyfile(src_path, os.path.join(self.state_dir, STATEDIR_DLPSTATE_DIR, "download.state"))

        # Copy a corrupt pstate file
        src_path = os.path.join(self.CONFIG_PATH, "download_pstate_70_corrupt.state")
        corrupt_dest_path = os.path.join(self.state_dir, STATEDIR_DLPSTATE_DIR, "downloadcorrupt.state")
        shutil.copyfile(src_path, corrupt_dest_path)

        old_config = RawConfigParser()
        old_config.read(os.path.join(self.CONFIG_PATH, "tribler70.conf"))
        convert_config_to_tribler71(old_config, state_dir=self.state_dir)

        # Verify whether the section is correctly renamed
        download_config = RawConfigParser()
        download_config.read(os.path.join(self.state_dir, STATEDIR_DLPSTATE_DIR, "download.state"))
        self.assertTrue(download_config.has_section("download_defaults"))
        self.assertFalse(download_config.has_section("downloadconfig"))
        self.assertFalse(os.path.exists(corrupt_dest_path))

        # Do the upgrade again, it should not fail
        convert_config_to_tribler71(old_config, state_dir=self.state_dir)
Code example #13
File: vcs.py Project: nijel/weblate
 def get_config(self, path):
     """
     Reads entry from configuration.
     """
     result = None
     section, option = path.split(".", 1)
     filename = os.path.join(self.path, ".hg", "hgrc")
     config = RawConfigParser()
     config.read(filename)
     if config.has_option(section, option):
         result = config.get(section, option)
         if six.PY2:
             result = result.decode("utf-8")
     return result
Code example #14
File: test_cainstance.py Project: npmccallum/freeipa
 def _test_write_pkispawn_config_file(self, template, expected):
     """
     Test that the values we read from an ExternalCAProfile
     object can be used to produce a reasonable-looking pkispawn
     configuration.
     """
     config = RawConfigParser()
     config.optionxform = str
     config.add_section("CA")
     config.set("CA", "pki_req_ext_oid", template.ext_oid)
     config.set("CA", "pki_req_ext_data",
                hexlify(template.get_ext_data()).decode('ascii'))
     out = StringIO()
     config.write(out)
     assert out.getvalue() == expected
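
Both this test and the KRA example later set config.optionxform = str, which stops RawConfigParser from lower-casing option names; a standalone sketch of the difference (the option name is illustrative):

    from io import StringIO
    from configparser import RawConfigParser

    def dump(preserve_case):
        cp = RawConfigParser()
        if preserve_case:
            cp.optionxform = str          # keep option names exactly as written
        cp.add_section("CA")
        cp.set("CA", "pki_req_ext_OID", "1.2.3")
        buf = StringIO()
        cp.write(buf)
        return buf.getvalue()

    assert "pki_req_ext_oid" in dump(False)   # default: option names are lower-cased
    assert "pki_req_ext_OID" in dump(True)    # optionxform = str preserves the case
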
Code example #15
File: config.py Project: luke-chang/gecko-1
    def __init__(self):
        self._config = RawConfigParser()
        self._config.optionxform = str

        self._settings = {}
        self._sections = {}
        self._finalized = False
        self.loaded_files = set()
Code example #16
    def test_read_test_corr_libtribler_conf(self):
        """
        Adding corrupt values should result in the default value.

        Note that this test might fail if there is already an upgraded config stored in the default
        state directory. The code being tested here, however, shouldn't be run if that config already exists.
        :return:
        """
        old_config = RawConfigParser()
        old_config.read(os.path.join(self.CONFIG_PATH, "libtriblercorrupt70.conf"))
        new_config = TriblerConfig(ConfigObj(configspec=CONFIG_SPEC_PATH))

        result_config = add_libtribler_config(new_config, old_config)

        self.assertEqual(len(result_config.get_tunnel_community_socks5_listen_ports()), 5)
        self.assertEqual(result_config.get_anon_proxy_settings(), (2, ('127.0.0.1', [-1, -1, -1, -1, -1]), ''))
        self.assertEqual(result_config.get_credit_mining_sources(), new_config.get_credit_mining_sources())
Code example #17
File: util.py Project: xiaogaozi/azkaban
 def __init__(self, path=expanduser("~/.azkabanrc")):
     self.parser = RawConfigParser()
     self.path = path
     if exists(path):
         try:
             self.parser.read(self.path)
         except ParsingError:
             raise AzkabanError("Invalid configuration file %r.", path)
Code example #18
File: util.py Project: pombredanne/glottolog3
def update_providers(args, verbose=False):
    filepath = args.data_dir.joinpath('references', 'bibtex', 'BIBFILES.ini')
    p = RawConfigParser()
    with io.open(filepath, encoding='utf-8-sig') as fp:
        p.readfp(fp)

    provider_map = get_map(Provider)
    for section in p.sections():
        sectname = section[:-4] if section.endswith('.bib') else section
        id_ = slug(sectname)
        attrs = {
            'name': p.get(section, 'title'),
            'description': p.get(section, 'description'),
            'abbr': p.get(section, 'abbr'),
        }
        if id_ in provider_map:
            provider = provider_map[id_]
            for a in list(attrs):
                before, after = getattr(provider, a), attrs[a]
                if before == after:
                    del attrs[a]
                else:
                    setattr(provider, a, after)
                    attrs[a] = (before, after)
            if attrs:
                args.log.info('updating provider %s %s' % (slug(id_), sorted(attrs)))
            if verbose:
                for a, (before, after) in attrs.items():
                    before, after = (' '.join(_.split()) for _ in (before, after))
                    if before != after:
                        args.log.info('%s\n%r\n%r' % (a, before, after))
        else:
            args.log.info('adding provider %s' % slug(id_))
            DBSession.add(Provider(id=id_, **attrs))
Code example #19
File: mail.py Project: BBOOXX/stash
 def read_cfg(self):
     parser = RawConfigParser()
     parser.read(self.cfg_file)
     if not parser.has_section('mail'):
         print('Creating cfg file.')
         self.make_cfg()
     
     self.auth     = parser.get('mail','auth')
     self.user     = parser.get('mail','username')
     self.passwd   = parser.get('mail','password')
     self.mailfrom = parser.get('mail','mailfrom')
     self.host     = parser.get('mail','host')
     self.port     = parser.get('mail','port')
     self.tls      = parser.get('mail','tls')
Code example #20
File: tweet.py Project: chaddotson/pytools
def main():
    logging_config = dict(level=INFO, format='%(asctime)s - %(name)s - %(levelname)s - %(message)s')

    if PY2:
        logging_config['disable_existing_loggers'] = True

    basicConfig(**logging_config)

    args = get_args()
    config = RawConfigParser()
    config.read(args.configuration)

    timestamp = datetime.now().strftime("%H:%M:%S")
    machine = gethostname()

    if args.no_stamp:
        message = args.message
    else:
        message = "CNC/{machine} [{timestamp}]: {content}".format(machine=machine,
                                                                  timestamp=timestamp,
                                                                  content=args.message)

    logger.info("Sending message to twitter: %s", message)
    tweet(config.get("TWITTER", "CONSUMER_KEY"),
          config.get("TWITTER", "CONSUMER_SECRET"),
          config.get("TWITTER", "ACCESS_KEY"),
          config.get("TWITTER", "ACCESS_SECRET"),
          message)

    logger.info("Done")
Code example #21
File: projects.py Project: hydrargyrum/eye
	def _parseFile(self, cfgpath):
		# add a starting section so it becomes INI format
		try:
			with open(cfgpath) as fp:
				contents = fp.read()
		except IOError:
			LOGGER.error('cannot read %r', cfgpath, exc_info=True)
			return None
		fp = StringIO('[_ROOT_]\n%s' % contents)

		cfg = RawConfigParser()
		try:
			cfg.readfp(fp, cfgpath)
		except Error:
			LOGGER.error('cannot parse %r', cfgpath, exc_info=True)
			return None

		return cfg
Code example #22
File: mkwsgiinstance.py Project: zopefoundation/Zope
def check_buildout(script_path):
    """ Are we running from within a buildout which supplies 'zopepy'?
    """
    buildout_cfg = os.path.join(os.path.dirname(script_path), 'buildout.cfg')
    if os.path.exists(buildout_cfg):
        parser = RawConfigParser()
        try:
            parser.read(buildout_cfg)
            return 'zopepy' in parser.sections()
        except ParsingError:
            # zc.buildout uses its own parser and it allows syntax that
            # ConfigParser does not like. Here's one really stupid workaround.
            # The alternative is using the zc.buildout parser, which would
            # introduce a hard dependency.
            zope_py = os.path.join(os.path.dirname(script_path),
                                   'bin', 'zopepy')
            if os.path.isfile(zope_py) and os.access(zope_py, os.X_OK):
                return True
Code example #23
File: helpers.py Project: larsks/cloud-init
 def get(self, section, option):
     value = self.DEF_BASE
     try:
         value = RawConfigParser.get(self, section, option)
     except NoSectionError:
         pass
     except NoOptionError:
         pass
     return value
Code example #24
File: units.py Project: NoriVicJr/OpenMDAO
def update_library(filename):
    """
    Update units in current library from `filename`, which must contain a
    ``units`` section.

    filename: string or file
        Source of units configuration data.
    """
    if isinstance(filename, basestring):
        inp = open(filename, 'rU')
    else:
        inp = filename
    try:
        cfg = ConfigParser()
        cfg.optionxform = _do_nothing
        cfg.readfp(inp)
        _update_library(cfg)
    finally:
        inp.close()
Code example #25
File: config.py Project: Riptawr/hdfs
 def __init__(self, path=None, stream_log_level=None):
   RawConfigParser.__init__(self)
   self._clients = {}
   self.path = path or os.getenv('HDFSCLI_CONFIG', self.default_path)
   if stream_log_level:
     stream_handler = lg.StreamHandler()
     stream_handler.setLevel(stream_log_level)
     fmt = '%(levelname)s\t%(message)s'
     stream_handler.setFormatter(lg.Formatter(fmt))
     lg.getLogger().addHandler(stream_handler)
   if osp.exists(self.path):
     try:
       self.read(self.path)
     except ParsingError:
       raise HdfsError('Invalid configuration file %r.', self.path)
     else:
       self._autoload()
     _logger.info('Instantiated configuration from %r.', self.path)
   else:
     _logger.info('Instantiated empty configuration.')
Code example #26
File: config.py Project: MPDL/pyimeji
 def __init__(self, **kw):
     config_dir = kw.pop('config_dir', None) or APP_DIRS.user_config_dir
     RawConfigParser.__init__(self, kw)
     config_file = kw.pop('config_file', 'config.ini')
     cfg_path = os.path.join(config_dir, config_file)
     if os.path.exists(cfg_path):
         assert os.path.isfile(cfg_path)
         self.read(cfg_path)
     else:
         if not os.path.exists(config_dir):
             try:
                 os.makedirs(config_dir)
             except OSError:  # pragma: no cover
                 # this happens when run on travis-ci, by a system user.
                 pass
         if os.path.exists(config_dir):
             with open(cfg_path, 'w') as fp:
                 self.write(fp)
     level = self.get('logging', 'level', default=None)
     if level:
         logging.basicConfig(level=getattr(logging, level))
Code example #27
File: units.py Project: OpenMDAO/OpenMDAO
def update_library(filename):
    """
    Update units in current library from `filename`.

    Parameters
    ----------
    filename : string or file
        Source of units configuration data.
    """
    if isinstance(filename, basestring):
        inp = open(filename, 'rU')
    else:
        inp = filename
    try:
        cfg = ConfigParser()
        cfg.optionxform = _do_nothing

        # New in Python 3.2: read_file() replaces readfp().
        if sys.version_info >= (3, 2):
            cfg.read_file(inp)
        else:
            cfg.readfp(inp)

        _update_library(cfg)
    finally:
        inp.close()
Code example #28
File: remote.py Project: dacut/dist.kanga.org
def remotebuild():
    builder = Builder()

    try:
        opts, args = getopt(argv[1:], "c:g:hi:k:o:p:r:s:V:",
                            ["config=", "security-group=", "security-groups=",
                             "securitygroup=", "securitygroups=", "help",
                             "instance-profile-name=", "instance-profile=",
                             "instanceprofilename=", "instanceprofile-name=",
                             "key-name=", "keyname=", "key=", "os=",
                             "profile=", "region=", "subnet-id=", "subnet=",
                             "volume-size="])
    except GetoptError as e:
        print(str(e), file=stderr)
        remotebuild_usage()
        return 1

    for opt, value in opts:
        if opt in ("-c", "--config"):
            cp = RawConfigParser()
            cp.read([value])
            for opt, value in cp.items("dist.kanga.org"):
                parse_remotebuild_option(builder, opt, value)
        else:
            try:
                parse_remotebuild_option(builder, opt, value)
            except StopIteration:
                return 0
            except ValueError as e:
                print(str(e), file=stderr)
                remotebuild_usage()
                return 1

    if len(builder.os_ids) == 0:
        print("No OS ids specified for building.", file=stderr)
        return 1

    builder.build_all()

    return 0
Code example #29
File: config.py Project: Network-Dynamics/pyimeji
    def __init__(self, **kw):
        config_dir = kw.pop('config_dir', None) or APP_DIRS.user_config_dir
        RawConfigParser.__init__(self, kw)

        cfg_path = os.path.join(config_dir, 'config.ini')
        if os.path.exists(cfg_path):
            assert os.path.isfile(cfg_path)
            self.read(cfg_path)
        else:
            if not os.path.exists(config_dir):
                try:
                    os.makedirs(config_dir)
                except OSError:  # pragma: no cover
                    # this happens when run on travis-ci, by a system user.
                    pass
            if os.path.exists(config_dir):
                with open(cfg_path, 'w') as fp:
                    self.write(fp)
                    print("New blanck configuration file generated at %s, please edit to to suit your needs.\
                            documentation at: %s"%(cfg_path,'http://pyimeji.readthedocs.org/en/latest/tutorial.html'))
        level = self.get('logging', 'level', default=None)
        if level:
            logging.basicConfig(level=getattr(logging, level))
Code example #30
File: util.py Project: mtth/azkaban
 def __init__(self, path=None):
   self.parser = RawConfigParser()
   self.path = path or expanduser('~/.azkabanrc')
   # TODO: make the default path be configurable via an environment variable.
   if exists(self.path):
     try:
       self.parser.read(self.path)
     except ParsingError:
       raise AzkabanError('Invalid configuration file %r.', self.path)
     else:
       # TODO: remove this in 1.0.
       if self._convert_aliases():
         self.save()
         self.parser.read(self.path)
Code example #31
from datetime import timedelta, datetime
import pytz
from flask import request
import json
import iso8601
from flask.views import MethodView
from six.moves.configparser import RawConfigParser
from http.client import HTTPConnection

# project imports
from db.alert_db import alert_db
from db.ns_db import ns_db
from nbi import log_queue
from monitoring import alert_configure

config = RawConfigParser()
config.read("../../monitoring/monitoring.properties")
so_ip = config.get("ALERTS", "so_scale_ns.ip")
so_port = config.get("ALERTS", "so_scale_ns.port")
so_scale_ns_base_path = config.get("ALERTS", "so_scale_ns.base_path")


class SLAManagerAPI(MethodView):
    def post(self):
        #data_json = request.data
        # data = json.loads(data_json)
        data = request.get_json(force=True)
        if "alerts" in data:
            alerts = data['alerts']
            for alert in alerts:
Code example #32
File: configstore.py Project: mcanthony/Photini
 def __init__(self):
     self.path = os.path.join(data_dir, 'keys.txt')
     self.config = RawConfigParser()
     self.config.read(self.path)
Code example #33
File: config.py Project: mesbahs/pyimeji
 def get(self, section, option, default=NO_DEFAULT):
     if default is not NO_DEFAULT:
         if not self.has_option(section, option):
             return default
     return RawConfigParser.get(self, section, option)
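
NO_DEFAULT is presumably a module-level sentinel object, so callers can pass default=None explicitly and still get None back; a self-contained sketch of the same pattern:

    from configparser import RawConfigParser

    NO_DEFAULT = object()   # sentinel: distinguishes "no default given" from default=None

    class Config(RawConfigParser):
        def get(self, section, option, default=NO_DEFAULT, **kwargs):
            if default is not NO_DEFAULT and not self.has_option(section, option):
                return default
            return RawConfigParser.get(self, section, option, **kwargs)

    cfg = Config()
    cfg.add_section('logging')
    assert cfg.get('logging', 'level', default=None) is None   # missing option -> default
    cfg.set('logging', 'level', 'INFO')
    assert cfg.get('logging', 'level') == 'INFO'                # present option -> stored value
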
Code example #34
 def set(self, section, option, value=None):
     if not self.has_section(section) and section.lower() != 'default':
         self.add_section(section)
     RawConfigParser.set(self, section, option, value)
Code example #35
def read_config(filename):
    """Read the config file called *filename*."""
    cp_ = RawConfigParser()
    cp_.read(filename)

    res = {}

    for section in cp_.sections():
        res[section] = dict(cp_.items(section))
        res[section].setdefault("delete", False)
        if res[section]["delete"] in ["", "False", "false", "0", "off"]:
            res[section]["delete"] = False
        if res[section]["delete"] in ["True", "true", "on", "1"]:
            res[section]["delete"] = True
        res[section].setdefault("working_directory", None)
        res[section].setdefault("compression", False)
        res[section].setdefault("xritdecompressor", None)
        res[section].setdefault("heartbeat", True)
        res[section].setdefault("req_timeout", DEFAULT_REQ_TIMEOUT)
        res[section].setdefault("transfer_req_timeout",
                                10 * DEFAULT_REQ_TIMEOUT)
        res[section].setdefault("nameservers", None)
        if res[section]["heartbeat"] in ["", "False", "false", "0", "off"]:
            res[section]["heartbeat"] = False

        if "providers" not in res[section]:
            LOGGER.warning("Incomplete section %s: add an 'providers' item.",
                           section)
            LOGGER.info("Ignoring section %s: incomplete.", section)
            del res[section]
            continue
        else:
            res[section]["providers"] = [
                "tcp://" + item.split('/', 1)[0]
                for item in res[section]["providers"].split()
            ]

        if "destination" not in res[section]:
            LOGGER.warning("Incomplete section %s: add an 'destination' item.",
                           section)
            LOGGER.info("Ignoring section %s: incomplete.", section)
            del res[section]
            continue

        if "topic" in res[section]:
            try:
                res[section]["publish_port"] = int(
                    res[section]["publish_port"])
            except (KeyError, ValueError):
                res[section]["publish_port"] = 0
        elif not res[section]["heartbeat"]:
            # We have no topics and therefore no subscriber (if you want to
            # subscribe to everything, explicitly specify an empty topic).
            LOGGER.warning(
                "Incomplete section %s: add an 'topic' "
                "item or enable heartbeat.", section)
            LOGGER.info("Ignoring section %s: incomplete.", section)
            del res[section]
            continue

    return res
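
For reference, a section that survives the validation above needs at least 'providers', 'destination', and either 'topic' or heartbeat enabled; an illustrative fragment:

    # [some-section]
    # providers = host1:9010 host2:9010   -> becomes ['tcp://host1:9010', 'tcp://host2:9010']
    # destination = /data/incoming
    # topic = /my/file/topic
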
Code example #36
def main():
    '''Main(). Commandline parsing and stalker startup.'''

    print("Setting timezone to UTC")
    os.environ["TZ"] = "UTC"
    time.tzset()

    parser = argparse.ArgumentParser()

    parser.add_argument("-d",
                        "--monitored_dirs",
                        dest="monitored_dirs",
                        nargs='+',
                        type=str,
                        default=[],
                        help="Names of the monitored directories "
                        "separated by space")
    parser.add_argument("-p",
                        "--posttroll_port",
                        dest="posttroll_port",
                        default=0,
                        type=int,
                        help="Local port where messages are published")
    parser.add_argument("-t",
                        "--topic",
                        dest="topic",
                        type=str,
                        default=None,
                        help="Topic of the sent messages")
    parser.add_argument("-c",
                        "--configuration_file",
                        type=str,
                        help="Name of the config.ini configuration file")
    parser.add_argument("-C",
                        "--config_item",
                        type=str,
                        help="Name of the configuration item to use")
    parser.add_argument("-e",
                        "--event_names",
                        type=str,
                        default=None,
                        help="Name of the pyinotify events to monitor")
    parser.add_argument("-f",
                        "--filepattern",
                        type=str,
                        help="Filepattern used to parse "
                        "satellite/orbit/date/etc information")
    parser.add_argument("-i",
                        "--instrument",
                        type=str,
                        default=None,
                        help="Instrument name in the satellite")
    parser.add_argument("-n",
                        "--nameservers",
                        type=str,
                        default=None,
                        help="Posttroll nameservers to register own address,"
                        " otherwise multicasting is used")

    if len(sys.argv) <= 1:
        parser.print_help()
        sys.exit()
    else:
        args = parser.parse_args()

    # Parse commandline arguments.  If args are given, they override
    # the configuration file.

    # Check first commandline arguments
    monitored_dirs = args.monitored_dirs
    if monitored_dirs == '':
        monitored_dirs = None

    posttroll_port = args.posttroll_port
    topic = args.topic
    event_names = args.event_names
    instrument = args.instrument
    nameservers = args.nameservers
    config_item = args.config_item

    filepattern = args.filepattern
    if args.filepattern == '':
        filepattern = None

    if args.configuration_file is not None:
        config_fname = args.configuration_file

        if "template" in config_fname:
            print("Template file given as trollstalker logging config,"
                  " aborting!")
            sys.exit()

        config = RawConfigParser()
        config.read(config_fname)
        config = OrderedDict(config.items(args.config_item))
        config['name'] = args.configuration_file

        topic = topic or config['topic']
        monitored_dirs = monitored_dirs or config['directory'].split(",")
        filepattern = filepattern or config['filepattern']
        try:
            posttroll_port = posttroll_port or int(config['posttroll_port'])
        except (KeyError, ValueError):
            if posttroll_port is None:
                posttroll_port = 0
        try:
            filepattern = filepattern or config['filepattern']
        except KeyError:
            pass
        try:
            event_names = event_names or config['event_names']
        except KeyError:
            pass
        try:
            instrument = instrument or config['instruments']
        except KeyError:
            pass
        try:
            history = int(config['history'])
        except KeyError:
            history = 0

        try:
            nameservers = nameservers or config['nameservers']
        except KeyError:
            nameservers = []

        aliases = helper_functions.parse_aliases(config)
        tbus_orbit = bool(config.get("tbus_orbit", False))

        granule_length = float(config.get("granule", 0))

        custom_vars = parse_vars(config)

        try:
            log_config = config["stalker_log_config"]
        except KeyError:
            try:
                loglevel = getattr(logging, config["loglevel"])
                if loglevel == "":
                    raise AttributeError
            except AttributeError:
                loglevel = logging.DEBUG
            LOGGER.setLevel(loglevel)

            strhndl = logging.StreamHandler()
            strhndl.setLevel(loglevel)
            log_format = "[%(asctime)s %(levelname)-8s %(name)s] %(message)s"
            formatter = logging.Formatter(log_format)

            strhndl.setFormatter(formatter)
            LOGGER.addHandler(strhndl)
        else:
            logging.config.fileConfig(log_config)

    event_names = event_names or 'IN_CLOSE_WRITE,IN_MOVED_TO'

    LOGGER.debug("Logger started")

    if type(monitored_dirs) is not list:
        monitored_dirs = [monitored_dirs]

    if nameservers:
        nameservers = nameservers.split(',')
    else:
        nameservers = []

    # Start watching for new files
    notifier = create_notifier(topic,
                               instrument,
                               posttroll_port,
                               filepattern,
                               event_names,
                               monitored_dirs,
                               config_item,
                               aliases=aliases,
                               tbus_orbit=tbus_orbit,
                               history=history,
                               granule_length=granule_length,
                               custom_vars=custom_vars,
                               nameservers=nameservers)
    notifier.start()

    try:
        while True:
            time.sleep(6000000)
    except KeyboardInterrupt:
        LOGGER.info("Interupting TrollStalker")
    finally:
        notifier.stop()
Code example #37
File: vcs.py Project: codetriage-readme-bot/weblate
 def set_config(self, path, value):
     """Set entry in local configuration."""
     if not self.lock.is_locked:
         raise RuntimeError('Repository operation without lock held!')
     section, option = path.split('.', 1)
     filename = os.path.join(self.path, '.hg', 'hgrc')
     if six.PY2:
         value = value.encode('utf-8')
         section = section.encode('utf-8')
         option = option.encode('utf-8')
     config = RawConfigParser()
     config.read(filename)
     if not config.has_section(section):
         config.add_section(section)
     if (config.has_option(section, option)
             and config.get(section, option) == value):
         return
     config.set(section, option, value)
     with open(filename, 'w') as handle:
         config.write(handle)
Code example #38
File: config.py Project: robertnorrie/browser-ff
class ConfigSettings(collections.Mapping):
    """Interface for configuration settings.

    This is the main interface to the configuration.

    A configuration is a collection of sections. Each section contains
    key-value pairs.

    When an instance is created, the caller first registers ConfigProvider
    instances with it. This tells the ConfigSettings what individual settings
    are available and defines extra metadata associated with those settings.
    This is used for validation, etc.

    Once ConfigProvider instances are registered, a config is populated. It can
    be loaded from files or populated by hand.

    ConfigSettings instances are accessed like dictionaries or by using
    attributes. e.g. the section "foo" is accessed through either
    settings.foo or settings['foo'].

    Sections are modeled by the ConfigSection class which is defined inside
    this one. They look just like dicts or classes with attributes. To access
    the "bar" option in the "foo" section:

        value = settings.foo.bar
        value = settings['foo']['bar']
        value = settings.foo['bar']

    Assignment is similar:

        settings.foo.bar = value
        settings['foo']['bar'] = value
        settings['foo'].bar = value

    You can even delete user-assigned values:

        del settings.foo.bar
        del settings['foo']['bar']

    If there is a default, it will be returned.

    When settings are mutated, they are validated against the registered
    providers. Setting unknown settings or setting values to illegal values
    will result in exceptions being raised.
    """
    class ConfigSection(collections.MutableMapping, object):
        """Represents an individual config section."""
        def __init__(self, config, name, settings):
            object.__setattr__(self, "_config", config)
            object.__setattr__(self, "_name", name)
            object.__setattr__(self, "_settings", settings)

            wildcard = any(s == "*" for s in self._settings)
            object.__setattr__(self, "_wildcard", wildcard)

        @property
        def options(self):
            try:
                return self._config.options(self._name)
            except NoSectionError:
                return []

        def get_meta(self, option):
            if option in self._settings:
                return self._settings[option]
            if self._wildcard:
                return self._settings["*"]
            raise KeyError("Option not registered with provider: %s" % option)

        def _validate(self, option, value):
            meta = self.get_meta(option)
            meta["type_cls"].validate(value)

            if "choices" in meta and value not in meta["choices"]:
                raise ValueError("Value '%s' must be one of: %s" %
                                 (value, ", ".join(sorted(meta["choices"]))))

        # MutableMapping interface
        def __len__(self):
            return len(self.options)

        def __iter__(self):
            return iter(self.options)

        def __contains__(self, k):
            return self._config.has_option(self._name, k)

        def __getitem__(self, k):
            meta = self.get_meta(k)

            if self._config.has_option(self._name, k):
                v = meta["type_cls"].from_config(self._config, self._name, k)
            else:
                v = meta.get("default", DefaultValue)

            if v == DefaultValue:
                raise KeyError("No default value registered: %s" % k)

            self._validate(k, v)
            return v

        def __setitem__(self, k, v):
            self._validate(k, v)
            meta = self.get_meta(k)

            if not self._config.has_section(self._name):
                self._config.add_section(self._name)

            self._config.set(self._name, k, meta["type_cls"].to_config(v))

        def __delitem__(self, k):
            self._config.remove_option(self._name, k)

            # Prune empty sections.
            if not len(self._config.options(self._name)):
                self._config.remove_section(self._name)

        @reraise_attribute_error
        def __getattr__(self, k):
            return self.__getitem__(k)

        @reraise_attribute_error
        def __setattr__(self, k, v):
            self.__setitem__(k, v)

        @reraise_attribute_error
        def __delattr__(self, k):
            self.__delitem__(k)

    def __init__(self):
        self._config = RawConfigParser()
        self._config.optionxform = str

        self._settings = {}
        self._sections = {}
        self._finalized = False
        self.loaded_files = set()

    def load_file(self, filename):
        self.load_files([filename])

    def load_files(self, filenames):
        """Load a config from files specified by their paths.

        Files are loaded in the order given. Subsequent files will overwrite
        values from previous files. If a file does not exist, it will be
        ignored.
        """
        filtered = [f for f in filenames if os.path.exists(f)]

        fps = [open(f, "rt") for f in filtered]
        self.load_fps(fps)
        self.loaded_files.update(set(filtered))
        for fp in fps:
            fp.close()

    def load_fps(self, fps):
        """Load config data by reading file objects."""

        for fp in fps:
            self._config.readfp(fp)

    def write(self, fh):
        """Write the config to a file object."""
        self._config.write(fh)

    @classmethod
    def _format_metadata(
        cls,
        provider,
        section,
        option,
        type_cls,
        description,
        default=DefaultValue,
        extra=None,
    ):
        """Formats and returns the metadata for a setting.

        Each setting must have:

            section -- str section to which the setting belongs. This is how
                settings are grouped.

            option -- str id for the setting. This must be unique within the
                section it appears.

            type -- a ConfigType-derived type defining the type of the setting.

            description -- str describing how to use the setting and where it
                applies.

        Each setting has the following optional parameters:

            default -- The default value for the setting. If None (the default)
                there is no default.

            extra -- A dict of additional key/value pairs to add to the
                setting metadata.
        """
        if isinstance(type_cls, string_types):
            type_cls = TYPE_CLASSES[type_cls]

        meta = {
            "description": description,
            "type_cls": type_cls,
        }

        if default != DefaultValue:
            meta["default"] = default

        if extra:
            meta.update(extra)

        return meta

    def register_provider(self, provider):
        """Register a SettingsProvider with this settings interface."""

        if self._finalized:
            raise ConfigException(
                "Providers cannot be registered after finalized.")

        settings = provider.config_settings
        if callable(settings):
            settings = settings()

        config_settings = collections.defaultdict(dict)
        for setting in settings:
            section, option = setting[0].split(".")

            if option in config_settings[section]:
                raise ConfigException(
                    "Setting has already been registered: %s.%s" %
                    (section, option))

            meta = self._format_metadata(provider, section, option,
                                         *setting[1:])
            config_settings[section][option] = meta

        for section_name, settings in config_settings.items():
            section = self._settings.get(section_name, {})

            for k, v in settings.items():
                if k in section:
                    raise ConfigException("Setting already registered: %s.%s" %
                                          (section_name, k))

                section[k] = v

            self._settings[section_name] = section

    def _finalize(self):
        if self._finalized:
            return

        for section, settings in self._settings.items():
            s = ConfigSettings.ConfigSection(self._config, section, settings)
            self._sections[section] = s

        self._finalized = True

    # Mapping interface.
    def __len__(self):
        return len(self._settings)

    def __iter__(self):
        self._finalize()

        return iter(self._sections.keys())

    def __contains__(self, k):
        return k in self._settings

    def __getitem__(self, k):
        self._finalize()

        return self._sections[k]

    # Allow attribute access because it looks nice.
    @reraise_attribute_error
    def __getattr__(self, k):
        return self.__getitem__(k)
Code example #39
    def __spawn_instance(self):
        """
        Create and configure a new KRA instance using pkispawn.
        Creates a configuration file with IPA-specific
        parameters and passes it to the base class to call pkispawn
        """

        # Create an empty and secured file
        (cfg_fd, cfg_file) = tempfile.mkstemp()
        os.close(cfg_fd)
        pent = pwd.getpwnam(self.service_user)
        os.chown(cfg_file, pent.pw_uid, pent.pw_gid)
        self.tmp_agent_db = tempfile.mkdtemp(prefix="tmp-",
                                             dir=paths.VAR_LIB_IPA)
        tmp_agent_pwd = ipautil.ipa_generate_password()

        # Create a temporary file for the admin PKCS #12 file
        (admin_p12_fd, admin_p12_file) = tempfile.mkstemp()
        os.close(admin_p12_fd)

        # Create KRA configuration
        config = RawConfigParser()
        config.optionxform = str
        config.add_section("KRA")

        # Security Domain Authentication
        config.set("KRA", "pki_security_domain_https_port", "443")
        config.set("KRA", "pki_security_domain_password", self.admin_password)
        config.set("KRA", "pki_security_domain_user", self.admin_user)

        # issuing ca
        config.set("KRA", "pki_issuing_ca_uri",
                   "https://%s" % ipautil.format_netloc(self.fqdn, 443))

        # Server
        config.set("KRA", "pki_enable_proxy", "True")
        config.set("KRA", "pki_restart_configured_instance", "False")
        config.set("KRA", "pki_backup_keys", "True")
        config.set("KRA", "pki_backup_password", self.admin_password)

        # Client security database
        config.set("KRA", "pki_client_database_dir", self.tmp_agent_db)
        config.set("KRA", "pki_client_database_password", tmp_agent_pwd)
        config.set("KRA", "pki_client_database_purge", "True")
        config.set("KRA", "pki_client_pkcs12_password", self.admin_password)

        # Administrator
        config.set("KRA", "pki_admin_name", self.admin_user)
        config.set("KRA", "pki_admin_uid", self.admin_user)
        config.set("KRA", "pki_admin_email", "root@localhost")
        config.set("KRA", "pki_admin_password", self.admin_password)
        config.set("KRA", "pki_admin_nickname", "ipa-ca-agent")
        config.set("KRA", "pki_admin_subject_dn",
                   str(DN(('cn', 'ipa-ca-agent'), self.subject_base)))
        config.set("KRA", "pki_import_admin_cert", "False")
        config.set("KRA", "pki_client_admin_cert_p12", admin_p12_file)

        # Directory server
        config.set("KRA", "pki_ds_ldap_port", "389")
        config.set("KRA", "pki_ds_password", self.dm_password)
        config.set("KRA", "pki_ds_base_dn", six.text_type(self.basedn))
        config.set("KRA", "pki_ds_database", "ipaca")
        config.set("KRA", "pki_ds_create_new_db", "False")

        self._use_ldaps_during_spawn(config)

        # Certificate subject DNs
        config.set("KRA", "pki_subsystem_subject_dn",
                   str(DN(('cn', 'CA Subsystem'), self.subject_base)))
        config.set("KRA", "pki_sslserver_subject_dn",
                   str(DN(('cn', self.fqdn), self.subject_base)))
        config.set("KRA", "pki_audit_signing_subject_dn",
                   str(DN(('cn', 'KRA Audit'), self.subject_base)))
        config.set(
            "KRA", "pki_transport_subject_dn",
            str(DN(('cn', 'KRA Transport Certificate'), self.subject_base)))
        config.set(
            "KRA", "pki_storage_subject_dn",
            str(DN(('cn', 'KRA Storage Certificate'), self.subject_base)))

        # Certificate nicknames
        # Note that both the server certs and subsystem certs reuse
        # the ca certs.
        config.set("KRA", "pki_subsystem_nickname",
                   "subsystemCert cert-pki-ca")
        config.set("KRA", "pki_sslserver_nickname", "Server-Cert cert-pki-ca")
        config.set("KRA", "pki_audit_signing_nickname",
                   "auditSigningCert cert-pki-kra")
        config.set("KRA", "pki_transport_nickname",
                   "transportCert cert-pki-kra")
        config.set("KRA", "pki_storage_nickname", "storageCert cert-pki-kra")

        # Shared db settings
        # Needed because CA and KRA share the same database
        # We will use the dbuser created for the CA
        config.set("KRA", "pki_share_db", "True")
        config.set(
            "KRA", "pki_share_dbuser_dn",
            str(DN(('uid', 'pkidbuser'), ('ou', 'people'), ('o', 'ipaca'))))

        if not (os.path.isdir(paths.PKI_TOMCAT_ALIAS_DIR)
                and os.path.isfile(paths.PKI_TOMCAT_PASSWORD_CONF)):
            # generate pin which we know can be used for FIPS NSS database
            pki_pin = ipautil.ipa_generate_password()
            config.set("KRA", "pki_pin", pki_pin)
        else:
            pki_pin = None

        _p12_tmpfile_handle, p12_tmpfile_name = tempfile.mkstemp(dir=paths.TMP)

        if self.clone:
            krafile = self.pkcs12_info[0]
            shutil.copy(krafile, p12_tmpfile_name)
            pent = pwd.getpwnam(self.service_user)
            os.chown(p12_tmpfile_name, pent.pw_uid, pent.pw_gid)

            # Security domain registration
            config.set("KRA", "pki_security_domain_hostname", self.fqdn)
            config.set("KRA", "pki_security_domain_https_port", "443")
            config.set("KRA", "pki_security_domain_user", self.admin_user)
            config.set("KRA", "pki_security_domain_password",
                       self.admin_password)

            # Clone
            config.set("KRA", "pki_clone", "True")
            config.set("KRA", "pki_clone_pkcs12_path", p12_tmpfile_name)
            config.set("KRA", "pki_clone_pkcs12_password", self.dm_password)
            config.set("KRA", "pki_clone_setup_replication", "False")
            config.set(
                "KRA", "pki_clone_uri",
                "https://%s" % ipautil.format_netloc(self.master_host, 443))
        else:
            # the admin cert file is needed for the first instance of KRA
            cert = self.get_admin_cert()
            # First make sure that the directory exists
            parentdir = os.path.dirname(paths.ADMIN_CERT_PATH)
            if not os.path.exists(parentdir):
                os.makedirs(parentdir)
            with open(paths.ADMIN_CERT_PATH, "wb") as admin_path:
                admin_path.write(
                    base64.b64encode(cert.public_bytes(x509.Encoding.DER)))

        # Generate configuration file
        with open(cfg_file, "w") as f:
            config.write(f)

        try:
            DogtagInstance.spawn_instance(self,
                                          cfg_file,
                                          nolog_list=(self.dm_password,
                                                      self.admin_password,
                                                      pki_pin, tmp_agent_pwd))
        finally:
            os.remove(p12_tmpfile_name)
            os.remove(cfg_file)
            os.remove(admin_p12_file)

        shutil.move(paths.KRA_BACKUP_KEYS_P12, paths.KRACERT_P12)
        logger.debug("completed creating KRA instance")
Code example #40
File: configstore.py Project: dirten/Photini
class BaseConfigStore(object):
    # the actual config store functionality
    def __init__(self, name, *arg, **kw):
        super(BaseConfigStore, self).__init__(*arg, **kw)
        self.dirty = False
        self.config = RawConfigParser()
        config_dir = os.environ.get('PHOTINI_CONFIG')
        if config_dir:
            config_dir = os.path.expanduser(config_dir)
        elif hasattr(appdirs, 'user_config_dir'):
            config_dir = appdirs.user_config_dir('photini')
        else:
            config_dir = appdirs.user_data_dir('photini')
        if not os.path.isdir(config_dir):
            os.makedirs(config_dir, mode=stat.S_IRWXU)
        self.file_name = os.path.join(config_dir, name + '.ini')
        if os.path.isfile(self.file_name):
            if six.PY2:
                self.config.readfp(open(self.file_name, 'r'))
            else:
                self.config.readfp(open(self.file_name, 'r', encoding='utf-8'))
        self.has_section = self.config.has_section

    def get(self, section, option, default=None):
        if self.config.has_option(section, option):
            result = self.config.get(section, option)
            if six.PY2:
                return result.decode('utf-8')
            return result
        if default is not None:
            self.set(section, option, default)
        return default

    def set(self, section, option, value):
        if six.PY2:
            value = value.encode('utf-8')
        if not self.config.has_section(section):
            self.config.add_section(section)
        elif (self.config.has_option(section, option) and
              self.config.get(section, option) == value):
            return
        self.config.set(section, option, value)
        self.dirty = True

    def remove_section(self, section):
        if not self.config.has_section(section):
            return
        for option in self.config.options(section):
            self.config.remove_option(section, option)
        self.config.remove_section(section)
        self.dirty = True

    def save(self):
        if not self.dirty:
            return
        if six.PY2:
            self.config.write(open(self.file_name, 'w'))
        else:
            self.config.write(open(self.file_name, 'w', encoding='utf-8'))
        os.chmod(self.file_name, stat.S_IRUSR | stat.S_IWUSR)
        self.dirty = False
Code example #41
class TestGithubService(TestCase):

    def setUp(self):
        self.config = RawConfigParser()
        self.config.interactive = False
        self.config.add_section('general')
        self.config.add_section('mygithub')
        self.config.set('mygithub', 'service', 'github')
        self.config.set('mygithub', 'github.login', 'tintin')
        self.config.set('mygithub', 'github.username', 'milou')
        self.config.set('mygithub', 'github.password', 't0ps3cr3t')
        self.service_config = ServiceConfig(
            GithubService.CONFIG_PREFIX, self.config, 'mygithub')

    def test_token_authorization_header(self):
        self.config.remove_option('mygithub', 'github.password')
        self.config.set('mygithub', 'github.token',
                        '@oracle:eval:echo 1234567890ABCDEF')
        service = GithubService(self.config, 'general', 'mygithub')
        self.assertEqual(service.client.session.headers['Authorization'],
                         "token 1234567890ABCDEF")

    def test_default_host(self):
        """ Check that if github.host is not set, we default to github.com """
        service = GithubService(self.config, 'general', 'mygithub')
        self.assertEquals("github.com", service.host)

    def test_overwrite_host(self):
        """ Check that if github.host is set, we use its value as host """
        self.config.set('mygithub', 'github.host', 'github.example.com')
        service = GithubService(self.config, 'general', 'mygithub')
        self.assertEquals("github.example.com", service.host)

    def test_keyring_service(self):
        """ Checks that the keyring service name """
        keyring_service = GithubService.get_keyring_service(self.service_config)
        self.assertEquals("github://[email protected]/milou", keyring_service)

    def test_keyring_service_host(self):
        """ Checks that the keyring key depends on the github host. """
        self.config.set('mygithub', 'github.host', 'github.example.com')
        keyring_service = GithubService.get_keyring_service(self.service_config)
        self.assertEquals("github://[email protected]/milou", keyring_service)

    def test_get_repository_from_issue_url__issue(self):
        issue = dict(repos_url="https://github.com/foo/bar")
        repository = GithubService.get_repository_from_issue(issue)
        self.assertEquals("foo/bar", repository)

    def test_get_repository_from_issue_url__pull_request(self):
        issue = dict(repos_url="https://github.com/foo/bar")
        repository = GithubService.get_repository_from_issue(issue)
        self.assertEquals("foo/bar", repository)

    def test_get_repository_from_issue__enterprise_github(self):
        issue = dict(repos_url="https://github.acme.biz/foo/bar")
        repository = GithubService.get_repository_from_issue(issue)
        self.assertEquals("foo/bar", repository)
Code example #42
File: helpers.py Project: vrautela/treadmill-workdir
 def getfloat(self, section, option):
     if not self.has_option(section, option):
         return self.DEF_FLOAT
     return RawConfigParser.getfloat(self, section, option)
Code example #43
File: helpers.py Project: vrautela/treadmill-workdir
 def getboolean(self, section, option):
     if not self.has_option(section, option):
         return self.DEF_BOOLEAN
     return RawConfigParser.getboolean(self, section, option)
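Together with the getint variant shown later (Code example #57), these helpers suggest a RawConfigParser subclass that returns class-level defaults for missing options. A minimal sketch; the DEF_* values here are illustrative, not the ones the project uses:

from configparser import RawConfigParser

class DefaultingParser(RawConfigParser):
    # illustrative defaults -- the real class presumably defines its own DEF_* values
    DEF_INT = 0
    DEF_FLOAT = 0.0
    DEF_BOOLEAN = False

    def getfloat(self, section, option):
        if not self.has_option(section, option):
            return self.DEF_FLOAT
        return RawConfigParser.getfloat(self, section, option)

    def getboolean(self, section, option):
        if not self.has_option(section, option):
            return self.DEF_BOOLEAN
        return RawConfigParser.getboolean(self, section, option)

parser = DefaultingParser()
parser.add_section('timers')
print(parser.getfloat('timers', 'interval'))   # -> 0.0, the default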
Code example #44
        handler = logging.StreamHandler()
    handler.setFormatter(
        logging.Formatter(
            "[%(levelname)s: %(asctime)s :"
            " %(name)s] %(message)s", '%Y-%m-%d %H:%M:%S'))
    if opts.verbose:
        loglevel = logging.DEBUG
    else:
        loglevel = logging.INFO
    handler.setLevel(loglevel)
    logging.getLogger('').setLevel(loglevel)
    logging.getLogger('').addHandler(handler)
    logging.getLogger("posttroll").setLevel(logging.INFO)
    LOG = logging.getLogger("cat")

    cfg = RawConfigParser()
    cfg.read(opts.config)
    config = dict(cfg.items(opts.config_item))

    services = ''
    if 'services' in config:
        services = config['services'].split(',')

    nameservers = []
    if 'nameservers' in config:
        nameservers = config['nameservers'].split(',')

    try:
        with Publish("cat_" + opts.config_item,
                     nameservers=nameservers) as pub:
            with Subscribe(services, config["topic"], True) as sub:
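The fragment breaks off inside the Subscribe block, but its configuration step is self-contained: read one section into a plain dict and split the comma-separated lists. A standalone sketch with illustrative section and option values:

import io
from configparser import RawConfigParser

SAMPLE = """\
[cat_hrpt]
topic = /HRPT/l1b
services = service-a,service-b
nameservers = localhost
"""

cfg = RawConfigParser()
cfg.read_file(io.StringIO(SAMPLE))           # cfg.read(path) for a real file
config = dict(cfg.items('cat_hrpt'))

services = config['services'].split(',') if 'services' in config else ''
nameservers = config['nameservers'].split(',') if 'nameservers' in config else []
print(config['topic'], services, nameservers)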
Code example #45
File: util.py Project: alabamamonroe/azkaban
class Config(object):
    """Configuration class.

  :param path: path to configuration file. If no file exists at that location,
    the configuration parser will be empty.

  """
    def __init__(self, path=expanduser('~/.azkabanrc')):
        self.parser = RawConfigParser()
        self.path = path
        if exists(path):
            try:
                self.parser.read(self.path)
            except ParsingError:
                raise AzkabanError('Invalid configuration file %r.', path)

    def save(self):
        """Save configuration parser back to file."""
        with open(self.path, 'w') as writer:
            self.parser.write(writer)

    def get_option(self, command, name, default=None):
        """Get option value for a command.

    :param command: Command the option should be looked up for.
    :param name: Name of the option.
    :param default: Default value to be returned if not found in the
      configuration file. If not provided, will raise
      :class:`~azkaban.util.AzkabanError`.

    """
        try:
            return self.parser.get(command, name)
        except (NoOptionError, NoSectionError):
            if default is not None:
                return default
            else:
                raise AzkabanError(
                    'No %(name)s found in %(path)r for %(command)s.\n'
                    'You can specify one by adding a `%(name)s` option in the '
                    '`%(command)s` section.' % {
                        'command': command,
                        'name': name,
                        'path': self.path
                    })

    def get_file_handler(self, command):
        """Add and configure file handler.

    :param command: Command the options should be looked up for.

    The default path can be configured via the `default.log` option in the
    command's corresponding section.

    """
        handler_path = osp.join(gettempdir(), '%s.log' % (command, ))
        try:
            handler = TimedRotatingFileHandler(
                self.get_option(command, 'default.log', handler_path),
                when='midnight',  # daily backups
                backupCount=1,
                encoding='utf-8',
            )
        except IOError:
            wr.warn('Unable to write to log file at %s.' % (handler_path, ))
        else:
            handler_format = '[%(levelname)s] %(asctime)s :: %(name)s :: %(message)s'
            handler.setFormatter(lg.Formatter(handler_format))
            return handler
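A hedged usage sketch of the get_option fallback idea, rebuilt standalone so it runs without the azkaban package (section, option and default values are illustrative):

from configparser import RawConfigParser, NoOptionError, NoSectionError

parser = RawConfigParser()
parser.read_string('[upload]\nurl = http://azkaban.example.com\n')   # illustrative content

def get_option(command, name, default=None):
    """Return an option for *command*, falling back to *default* if absent."""
    try:
        return parser.get(command, name)
    except (NoOptionError, NoSectionError):
        if default is not None:
            return default
        raise KeyError('no %r option configured for %r' % (name, command))

print(get_option('upload', 'url'))
print(get_option('upload', 'retries', default='3'))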
Code example #46
defaults = {
    'ORLO_CONFIG': '/etc/orlo/orlo.ini',
    'ORLO_LOGDIR': '/var/log/orlo',
}

for var, default in defaults.items():
    try:
        defaults[var] = os.environ[var]
    except KeyError:
        pass

# Horrible hack, but I am tired
if os.environ.get('READTHEDOCS', None) == 'True':
    defaults['ORLO_LOGDIR'] = 'disabled'

config = RawConfigParser()

config.add_section('main')
config.set('main', 'time_format', '%Y-%m-%dT%H:%M:%SZ')
config.set('main', 'time_zone', 'UTC')
config.set('main', 'base_url', 'http://localhost:8080')

config.add_section('gunicorn')
config.set('gunicorn', 'workers', '2')
config.set('gunicorn', 'bind', '127.0.0.1:8080')

config.add_section('security')
config.set('security', 'enabled', 'false')
config.set('security', 'passwd_file', 'none')
config.set('security', 'secret_key', 'change_me')
# NOTE: orlo.__init__ checks that secret_key is not "change_me" when security is enabled
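A condensed, standalone sketch of the same approach — seed defaults programmatically, then let an optional .ini file override them; the environment variable and path follow the defaults dict above:

import os
from configparser import RawConfigParser

config = RawConfigParser()
config.add_section('main')
config.set('main', 'base_url', 'http://localhost:8080')
config.add_section('security')
config.set('security', 'enabled', 'false')

# values from an existing file silently replace the in-code defaults above
config_path = os.environ.get('ORLO_CONFIG', '/etc/orlo/orlo.ini')
config.read(config_path)                     # a missing file is simply ignored

print(config.get('main', 'base_url'))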
Code example #47
def generate_logconf_file_contents():
    cfg_parser = RawConfigParser()

    cfg_parser.add_section('formatters')
    cfg_parser.add_section('formatter_simple')
    cfg_parser.set('formatters', 'keys', 'simple')
    cfg_parser.set('formatter_simple', 'format',
                   '%(name)s:%(levelname)s: %(message)s')

    cfg_parser.add_section('handlers')
    cfg_parser.add_section('handler_console')
    cfg_parser.add_section('handler_api_server_file')
    cfg_parser.set('handlers', 'keys', 'console,api_server_file')
    cfg_parser.set('handler_console', 'class', 'StreamHandler')
    cfg_parser.set('handler_console', 'level', 'WARN')
    cfg_parser.set('handler_console', 'args', '[]')
    cfg_parser.set('handler_console', 'formatter', 'simple')
    cfg_parser.set('handler_api_server_file', 'class', 'FileHandler')
    cfg_parser.set('handler_api_server_file', 'level', 'INFO')
    cfg_parser.set('handler_api_server_file', 'formatter', 'simple')
    cfg_parser.set('handler_api_server_file', 'args', "('api_server.log',)")

    cfg_parser.add_section('loggers')
    cfg_parser.add_section('logger_root')
    cfg_parser.add_section('logger_FakeWSGIHandler')
    cfg_parser.set('loggers', 'keys', 'root,FakeWSGIHandler')
    cfg_parser.set('logger_root', 'level', 'WARN')
    cfg_parser.set('logger_root', 'handlers', 'console')
    cfg_parser.set('logger_FakeWSGIHandler', 'level', 'INFO')
    cfg_parser.set('logger_FakeWSGIHandler', 'qualname', 'FakeWSGIHandler')
    cfg_parser.set('logger_FakeWSGIHandler', 'handlers', 'api_server_file')

    return cfg_parser
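A parser assembled this way can be handed straight to logging.config.fileConfig (a RawConfigParser instance is accepted since Python 3.4). A trimmed, console-only sketch:

import logging
import logging.config
from configparser import RawConfigParser

cfg_parser = RawConfigParser()
cfg_parser.add_section('formatters')
cfg_parser.set('formatters', 'keys', 'simple')
cfg_parser.add_section('formatter_simple')
cfg_parser.set('formatter_simple', 'format', '%(name)s:%(levelname)s: %(message)s')
cfg_parser.add_section('handlers')
cfg_parser.set('handlers', 'keys', 'console')
cfg_parser.add_section('handler_console')
cfg_parser.set('handler_console', 'class', 'StreamHandler')
cfg_parser.set('handler_console', 'level', 'WARN')
cfg_parser.set('handler_console', 'args', '[]')
cfg_parser.set('handler_console', 'formatter', 'simple')
cfg_parser.add_section('loggers')
cfg_parser.set('loggers', 'keys', 'root')
cfg_parser.add_section('logger_root')
cfg_parser.set('logger_root', 'level', 'WARN')
cfg_parser.set('logger_root', 'handlers', 'console')

# RawConfigParser instances are accepted directly (Python 3.4+)
logging.config.fileConfig(cfg_parser, disable_existing_loggers=False)
logging.getLogger('demo').warning('logging configured from a generated parser')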
Code example #48
    def setUp(self):
        self.ip = "localhost"
        self.port = "8080"
        self.headers = {
            'Content-Type': 'application/json',
            'Accept': 'application/json'
        }
        self.timeout = 10
        # connect
        self.conn = HTTPConnection(self.ip, self.port, timeout=self.timeout)
        # on board necessary descriptors
        # drop all databases
        # create the 5gtso db
        # python imports

        # load db IP port
        config = RawConfigParser()
        config.read("../db/db.properties")
        db_ip = config.get("MongoDB", "db.ip")
        db_port = int(config.get("MongoDB", "db.port"))
        operation_client = MongoClient(db_ip, db_port)
        fgtso_db = operation_client.fgtso
        fgtso_db.nsd.delete_many({})
        fgtso_db.vnfd.delete_many({})
        fgtso_db.ns.delete_many({})
        fgtso_db.nsir.delete_many({})
        fgtso_db.operation.delete_many({})
        fgtso_db.resources.delete_many({})
        # load descriptors
        # path to descriptors folders

        path = "../descriptors/"
        # list of file names that contain ns and vnf descriptors
        ns_descriptors = ["CDN_all_NSD_0_2.json"]
        vnf_descriptors = [
            "CDN_SPR1_VNFD_0_2.json", "CDN_SPR21_VNFD_0_2.json",
            "CDN_SPR22_VNFD_0_2.json", "CDN_WEBSERVER_VNFD_0_2.json"
        ]
        # correspondence of nsdId and nsdCloudifyId
        nsdCloudifyId = {"vCDN_v02": "unknown"}
        # for each nsd create record to be inserted
        nsd_json = {}  # load json file here
        for nsd_file in ns_descriptors:
            with open(path + nsd_file) as nsd_json:
                nsd_json = load(nsd_json)
            nsd_record = {
                "nsdId": nsd_json["nsd"]["nsdIdentifier"],
                "nsdCloudifyId":
                nsdCloudifyId[nsd_json["nsd"]["nsdIdentifier"]],
                "version": nsd_json["nsd"]["version"],
                "nsdName": nsd_json["nsd"]["nsdName"],
                "nsdJson": nsd_json
            }
            fgtso_db.nsd.insert_one(nsd_record)
        # for each vnfd create record to be inserted
        vnfd_json = {}  # load json file here
        for vnfd_file in vnf_descriptors:
            with open(path + vnfd_file) as vnfd_json:
                vnfd_json = load(vnfd_json)
            vnfd_record = {
                "vnfdId": vnfd_json["vnfdId"],
                "vnfdVersion": vnfd_json["vnfdVersion"],
                "vnfdName": vnfd_json["vnfProductName"],
                "vnfdJson": vnfd_json
            }
            fgtso_db.vnfd.insert_one(vnfd_record)
Code example #49
File: rooe.py Project: jorgebaranda/5gt-so
def instantiate_ns(nsId, nsd_json, vnfds_json, request):
    """
    Function description
    Parameters
    ----------
    param1: type
        param1 description
    Returns
    -------
    name: type
        return description
    """

    # extract the relevant information for the PA algorithm from the nsd_vnfd
    # extracted_info = extract_nsd_info_for_pa(nsd_json, vnfds_json, request)
    extracted_info = extract_nsd_info_for_pa(nsd_json, vnfds_json, request)
    log_queue.put(["DEBUG", "NSD extracted info for PA is:"])
    log_queue.put(["DEBUG", json.dumps(extracted_info, indent=4)])
    # first get mtp resources and lock db
    resources = get_mtp_resources()

    # ask pa to calculate the placement - read pa config from properties file
    config = RawConfigParser()
    config.read("../../sm/rooe/rooe.properties")
    pa_ip = config.get("PA", "pa.ip")
    pa_port = config.get("PA", "pa.port")
    pa_path = config.get("PA", "pa.path")
    pa_uri = "http://" + pa_ip + ":" + pa_port + pa_path
    # ask pa to calculate the placement - prepare the body
    paId = str(uuid4())
    body = {
        "ReqId": paId,
        "nfvi": resources,
        "nsd": extracted_info,
        "callback": "http://localhost:8080/5gt/so/v1/__callbacks/pa/" + paId
    }

    # ask pa to calculate the placement - do request
    header = {'Content-Type': 'application/json', 'Accept': 'application/json'}
    placement_info = {}
    # code below is commented until PA is ready
    #     try:
    #         conn = HTTPConnection(pa_ip, pa_port)
    #         conn.request("POST", pa_uri, body, header)
    #
    #         # ask pa to calculate the placement - read response and close connection
    #         rsp = self.conn.getresponse()
    #         placement_info = rsp.read()
    #         conn.close()
    #     except ConnectionRefusedError:
    #         # the PA server is not running or the connection configuration is wrong
    #         log_queue.put(["ERROR", "the PA server is not running or the connection configuration is wrong"])
    placement_info = {
        "usedNFVIPops": [{
            "NFVIPoPID": "openstack-site29_Zona1_w",
            "mappedVNFs": ["webserver", "spr21"]
        }, {
            "NFVIPoPID": "openstack-site31_w",
            "mappedVNFs": ["spr1"]
        }],
        "usedLLs": [{
            "LLID": "151515",
            "mappedVLs": ["VideoData"]
        }],
        "usedVLs": [{
            "NFVIPoP": "openstack-site29_Zona1_w",
            "mappedVLs": ["VideoDistribution"]
        }, {
            "NFVIPoP": "openstack-site31_w",
            "mappedVLs": ["mgt"]
        }],
        "totalLatency":
        1.3
    }

    # save placement info in database
    nsir_db.save_placement_info(nsId, placement_info)

    # ask cloudify/OSM to deploy vnfs
    coreMano = createWrapper()
    deployed_vnfs_info = {}
    deployed_vnfs_info = coreMano.instantiate_ns(nsId, nsd_json, body,
                                                 placement_info)
    if deployed_vnfs_info is not None and "sapInfo" in deployed_vnfs_info:
        log_queue.put(["DEBUG", "rooe: updating nsi:%s sapInfo" % nsId])
        ns_db.save_sap_info(nsId, deployed_vnfs_info["sapInfo"])

    # list of VLs to be deployed
    vls_info = extract_vls_info_mtp(resources, extracted_info, placement_info)

    # ask network execution engine to deploy the virtual links
    # line below commented until mtp is ready
    #     eenet.deploy_vls(vls_info, nsId)

    # set operation status as SUCCESSFULLY_DONE
    operationId = operation_db.get_operationId(nsId, "INSTANTIATION")
    if deployed_vnfs_info is not None:
        log_queue.put(["DEBUG", "NS Instantiation finished correctly"])
        operation_db.set_operation_status(operationId, "SUCCESSFULLY_DONE")
        # set ns status as INSTANTIATED
        ns_db.set_ns_status(nsId, "INSTANTIATED")
    else:
        log_queue.put(["ERROR", "NS Instantiation FAILED"])
        operation_db.set_operation_status(operationId, "FAILED")
        # set ns status as FAILED
        ns_db.set_ns_status(nsId, "FAILED")
Code example #50
File: units.py Project: felixonmars/OpenMDAO
        unit = eval(unit, {
            '__builtins__': None,
            'pi': pi
        }, _UNIT_LIB.unit_table)
    unit.set_name(name)
    if name in _UNIT_LIB.unit_table:
        if (_UNIT_LIB.unit_table[name]._factor != unit._factor
                or _UNIT_LIB.unit_table[name]._powers != unit._powers):
            raise KeyError("Unit %s already defined with " % name +
                           "different factor or powers")

    _UNIT_LIB.unit_table[name] = unit
    _UNIT_LIB.set('units', name, unit)


_UNIT_LIB = ConfigParser()


def _do_nothing(string):
    """
    Make the ConfigParser case sensitive.

    Defines an optionxform for the units configparser that
    does nothing, resulting in a case-sensitive parser.

    Parameters
    ----------
    string : str
        The string to be transformed for the ConfigParser

    Returns
Code example #51
    def read_cfg(self):
        parser = RawConfigParser()
        parser.read(self.cfg_file)
        if not parser.has_section('mail'):
            print('Creating cfg file.')
            self.make_cfg()
            # re-read the freshly written defaults so the lookups below succeed
            parser.read(self.cfg_file)

        self.auth = parser.get('mail', 'auth')
        self.user = parser.get('mail', 'username')
        self.passwd = parser.get('mail', 'password')
        self.mailfrom = parser.get('mail', 'mailfrom')
        self.host = parser.get('mail', 'host')
        self.port = parser.get('mail', 'port')
        self.tls = parser.get('mail', 'tls')
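make_cfg is not shown here; a hedged sketch of what such a default-writing counterpart could look like, using the option names the reader above expects and placeholder values:

from configparser import RawConfigParser

def make_cfg(cfg_file='mail.ini'):
    """Write a default [mail] section so read_cfg finds every option it expects."""
    parser = RawConfigParser()
    parser.add_section('mail')
    for option, value in [('auth', 'no'), ('username', ''), ('password', ''),
                          ('mailfrom', 'noreply@example.com'), ('host', 'localhost'),
                          ('port', '25'), ('tls', 'no')]:
        parser.set('mail', option, value)
    with open(cfg_file, 'w') as fp:
        parser.write(fp)

make_cfg()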
Code example #52
File: units.py Project: felixonmars/OpenMDAO
def import_library(libfilepointer):
    """
    Import a units library, replacing any existing definitions.

    Parameters
    ----------
    libfilepointer : file
        new library file to work with

    Returns
    -------
    ConfigParser
        newly updated units library for the module
    """
    global _UNIT_LIB
    global _UNIT_CACHE
    _UNIT_CACHE = {}
    _UNIT_LIB = ConfigParser()
    _UNIT_LIB.optionxform = _do_nothing
    _UNIT_LIB.readfp(libfilepointer)
    required_base_types = ['length', 'mass', 'time', 'temperature', 'angle']
    _UNIT_LIB.base_names = list()
    # used by is_angle() and other base type checks
    _UNIT_LIB.base_types = dict()
    _UNIT_LIB.unit_table = dict()
    _UNIT_LIB.prefixes = dict()
    _UNIT_LIB.help = list()

    for prefix, factor in _UNIT_LIB.items('prefixes'):
        factor, comma, comment = factor.partition(',')
        _UNIT_LIB.prefixes[prefix] = float(factor)

    base_list = [0] * len(_UNIT_LIB.items('base_units'))

    for i, (unit_type, name) in enumerate(_UNIT_LIB.items('base_units')):
        _UNIT_LIB.base_types[unit_type] = i
        powers = list(base_list)
        powers[i] = 1
        # print '%20s'%unit_type, powers
        # can't use add_unit because no base units exist yet
        _new_unit(name, 1, powers)
        _UNIT_LIB.base_names.append(name)

    # test for required base types
    missing = [
        utype for utype in required_base_types
        if utype not in _UNIT_LIB.base_types
    ]
    if missing:
        raise ValueError('Not all required base types were present in the'
                         ' config file. missing: %s, at least %s required' %
                         (missing, required_base_types))

    # Explicit unitless 'unit'.
    _new_unit('unitless', 1, list(base_list))
    _update_library(_UNIT_LIB)
    return _UNIT_LIB
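The optionxform trick is the key detail here: ConfigParser lower-cases option names by default, so a units table that distinguishes 'N' from 'n' needs an identity transform. A minimal sketch:

import io
from configparser import ConfigParser

parser = ConfigParser()
parser.optionxform = str            # identity transform -> option names stay case-sensitive

parser.read_file(io.StringIO("[units]\nN = newton\nnm = nanometre\n"))
print(parser.options('units'))      # ['N', 'nm'] -- case preserved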
Code example #53
 def remove_option(self, section, option):
     if self.has_option(section, option):
         RawConfigParser.remove_option(self, section, option)
Code example #54
def ini_to_dict(fname, section):
    """Convert *section* of .ini *config* to dictionary."""
    from six.moves.configparser import RawConfigParser, NoOptionError

    config = RawConfigParser()
    config.read(fname)

    conf = {}
    conf['posttroll'] = {}
    posttroll = conf['posttroll']
    posttroll['topics'] = config.get(section, 'topics').split()
    try:
        nameservers = config.get(section, 'nameservers')
        nameservers = nameservers.split()
    except (NoOptionError, ValueError):
        nameservers = None
    posttroll['nameservers'] = nameservers

    try:
        addresses = config.get(section, 'addresses')
        addresses = addresses.split()
    except (NoOptionError, ValueError):
        addresses = None
    posttroll['addresses'] = addresses

    try:
        services = config.get(section, 'services')
        services = services.split()
    except (NoOptionError, ValueError):
        services = ""
    posttroll['services'] = services

    try:
        publish_port = config.get(section, 'publish_port')
    except NoOptionError:
        publish_port = 0
    posttroll['publish_port'] = publish_port

    posttroll['publish_topic'] = config.get(section, "publish_topic")

    conf['patterns'] = {section: {}}
    patterns = conf['patterns'][section]
    patterns['pattern'] = config.get(section, 'pattern')
    patterns['critical_files'] = config.get(section, 'critical_files')
    patterns['wanted_files'] = config.get(section, 'wanted_files')
    patterns['all_files'] = config.get(section, 'all_files')
    patterns['is_critical_set'] = False
    try:
        patterns['variable_tags'] = config.get(section,
                                               'variable_tags').split(',')
    except NoOptionError:
        patterns['variable_tags'] = []

    try:
        conf['time_tolerance'] = config.getint(section, "time_tolerance")
    except NoOptionError:
        conf['time_tolerance'] = 30
    try:
        # Seconds
        conf['timeliness'] = config.getint(section, "timeliness")
    except (NoOptionError, ValueError):
        conf['timeliness'] = 1200

    try:
        conf['num_files_premature_publish'] = \
            config.getint(section, "num_files_premature_publish")
    except (NoOptionError, ValueError):
        conf['num_files_premature_publish'] = -1

    try:
        conf['group_by_minutes'] = config.getint(section, 'group_by_minutes')
    except (NoOptionError, ValueError):
        pass

    try:
        kps = config.get(section, 'keep_parsed_keys')
        conf['keep_parsed_keys'] = kps.split()
    except NoOptionError:
        pass

    try:
        conf['providing_server'] = config.get(section, "providing_server")
    except (NoOptionError, ValueError):
        conf['providing_server'] = None

    # Need to also add the section name to name the segment_gatherer service
    posttroll['section'] = section

    return conf
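On Python 3, most of the try/except blocks above can be collapsed with the fallback keyword (note it only covers missing options, not unparsable values, so the ValueError cases would still need a try/except). A brief sketch reusing the option names above:

from configparser import RawConfigParser

config = RawConfigParser()
config.read_string("[msg]\ntime_tolerance = 45\n")   # illustrative section content

# equivalent to the try/except NoOptionError blocks above
time_tolerance = config.getint('msg', 'time_tolerance', fallback=30)
timeliness = config.getint('msg', 'timeliness', fallback=1200)
providing_server = config.get('msg', 'providing_server', fallback=None)
print(time_tolerance, timeliness, providing_server)      # 45 1200 None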
Code example #55
File: configstore.py Project: mcanthony/Photini
class ConfigStore(object):
    def __init__(self, name):
        self.config = RawConfigParser()
        self.file_opts = {}
        if six.PY3:
            self.file_opts['encoding'] = 'utf-8'
        if hasattr(appdirs, 'user_config_dir'):
            config_dir = appdirs.user_config_dir('photini')
        else:
            config_dir = appdirs.user_data_dir('photini')
        if not os.path.isdir(config_dir):
            os.makedirs(config_dir, mode=stat.S_IRWXU)
        self.file_name = os.path.join(config_dir, name + '.ini')
        if name == 'editor':
            for old_file_name in (os.path.expanduser('~/photini.ini'),
                                  os.path.join(config_dir, 'photini.ini')):
                if os.path.exists(old_file_name):
                    self.config.read(old_file_name, **self.file_opts)
                    self.save()
                    os.unlink(old_file_name)
        self.config.read(self.file_name, **self.file_opts)
        self.timer = QtCore.QTimer()
        self.timer.setSingleShot(True)
        self.timer.setInterval(3000)
        self.timer.timeout.connect(self.save)
        self.has_section = self.config.has_section

    def get(self, section, option, default=None):
        if self.config.has_option(section, option):
            result = self.config.get(section, option)
            if six.PY2:
                return result.decode('utf-8')
            return result
        if default is not None:
            self.set(section, option, default)
        return default

    def set(self, section, option, value):
        if not self.config.has_section(section):
            self.config.add_section(section)
        if (self.config.has_option(section, option)
                and self.config.get(section, option) == value):
            return
        if six.PY2:
            value = value.encode('utf-8')
        self.config.set(section, option, value)
        self.timer.start()

    def remove_section(self, section):
        if not self.config.has_section(section):
            return
        for option in self.config.options(section):
            self.config.remove_option(section, option)
        self.config.remove_section(section)
        self.timer.start()

    def save(self):
        self.config.write(open(self.file_name, 'w', **self.file_opts))
        os.chmod(self.file_name, stat.S_IRUSR | stat.S_IWUSR)
Code example #56
File: apollo.py Project: Blue-infosec/APOLLO
def parse_module_definition(mod_info):

    database_names = set()

    for root, dirs, filenames in os.walk(mod_dir):
        for f in filenames:
            if f.endswith(".txt"):
                mod_def = os.path.join(root, f)
                fread = open(mod_def, 'r')
                contents = fread.read()

                parser = RawConfigParser()
                parser.read(mod_def)

                mod_name = mod_def
                query_name = parser.get('Query Metadata', 'QUERY_NAME')
                database_name = parser.get('Database Metadata',
                                           'DATABASE').split(',')
                activity = parser.get('Query Metadata', 'ACTIVITY')
                key_timestamp = parser.get('Query Metadata', 'KEY_TIMESTAMP')

                for database in database_name:
                    database_names.add(database)

                for db in database_name:
                    uniquekey = mod_def + "#" + db
                    mod_info[uniquekey] = []

                    if version == 'yolo':
                        for section in parser.sections():
                            try:
                                if "SQL Query" in section:
                                    sql_query = parser.get(section, 'QUERY')
                                    mod_info[uniquekey] = [
                                        query_name, db, activity,
                                        key_timestamp, sql_query
                                    ]
                            except:
                                pass
                    else:
                        for section in parser.sections():
                            try:
                                if version in section:
                                    sql_query = parser.get(section, 'QUERY')
                                    mod_info[uniquekey] = [
                                        query_name, db, activity,
                                        key_timestamp, sql_query
                                    ]
                            except:
                                pass

    print(
        "\n==> Parsing", len(mod_info),
        "modules (Note: Some modules may be run on more than one database.)")

    count = 1
    modules = set()

    for item in sorted(mod_info):
        dbs = item.split('#')
        modules.add(dbs[0])
        print("\t[" + str(count) + "] " + str(dbs[0]) + " on " + str(dbs[1]))
        count = count + 1

    print("\n==> Will lazily run APOLLO on " + str(len(modules)) +
          " unique modules and " + str(len(database_names)) +
          " unique databases.")

    print("\n==> Searching for database files...this may take a hot minute...")
    print()
    for root, dirs, filenames in os.walk(data_dir):
        for f in filenames:
            if f in database_names:
                for mod_def, mod_data in mod_info.items():
                    if mod_data:
                        if mod_data[1] == f:
                            mod_info[mod_def].append(os.path.join(root, f))

    for mod_def, mod_data in mod_info.items():
        mod_def_split = mod_def.split('#')
        if mod_data:
            print(mod_def_split[0] + " on " + mod_def_split[1], ":",
                  len(mod_data) - 5, "databases.")
            run_module(mod_def, mod_data[0], mod_data[5:], mod_data[2],
                       mod_data[3], mod_data[4])
            print()
        else:
            print(mod_def_split[0] + " on " + mod_def_split[1],
                  ": Module not supported for version of data provided.")
            print()
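A trimmed, self-contained sketch of the discovery loop above: walk a directory, parse each .txt module definition with RawConfigParser and collect its metadata (the directory name and stored list contents are illustrative):

import os
from configparser import RawConfigParser

mod_dir = "./modules"                 # illustrative module-definition directory
mod_info = {}

for root, dirs, filenames in os.walk(mod_dir):
    for f in filenames:
        if not f.endswith(".txt"):
            continue
        mod_def = os.path.join(root, f)
        parser = RawConfigParser()
        parser.read(mod_def)
        query_name = parser.get('Query Metadata', 'QUERY_NAME')
        databases = parser.get('Database Metadata', 'DATABASE').split(',')
        for db in databases:
            mod_info[mod_def + "#" + db] = [query_name, db]

print(len(mod_info), "module/database pairs found")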
Code example #57
File: helpers.py Project: vrautela/treadmill-workdir
 def getint(self, section, option):
     if not self.has_option(section, option):
         return self.DEF_INT
     return RawConfigParser.getint(self, section, option)
Code example #58
import copy

# project imports
from coreMano.cloudifyWrapper import CloudifyWrapper
from coreMano.osmWrapper import OsmWrapper
# from sm.rooe.pa import pa
from db.ns_db import ns_db
from db.operation_db import operation_db
from db.nsir_db import nsir_db
from db.resources_db import resources_db
from coreMano.coreManoWrapper import createWrapper
from sm.eenet import eenet
from nbi import log_queue
from sbi import sbi

config = RawConfigParser()
config.read("../../coreMano/coreMano.properties")
core_mano_name = config.get("CoreMano", "name")


def amending_pa_output(nsd_info, placement_info):
    """
    Function description
    Parameters
    ----------
    nsd_info: dict
        dictionary with information of the nsd and the vnfs included in it.
    placement_info:
        dictionary with the output of the PA, but requires amendments to include VL that are not connecting VNF's
    -------
    Returns
Code example #59
def instantiate_ns(nsId, nsd_json, vnfds_json, request, nestedInfo=None):
    """
    Function description
    Parameters
    ----------
    param1: type
        param1 description
    Returns
    -------
    name: type
        return description
    """
    # extract the relevant information for the PA algorithm from the nsd_vnfd
    log_queue.put(["INFO", "*****Time measure: ROE starting ROE processing"])
    extracted_info = extract_nsd_info_for_pa(nsd_json, vnfds_json, request)
    log_queue.put(["INFO", "*****Time measure: ROE extracted NSD info at ROE"])
    log_queue.put(["INFO", dumps(extracted_info, indent=4)])
    # first get mtp resources and lock db
    resources = sbi.get_mtp_resources()
    log_queue.put(["INFO", "MTP resources are:"])
    log_queue.put(["INFO", dumps(resources, indent=4)])
    log_queue.put(["INFO", "*****Time measure: ROE retrieved MTP resources"])

    # ask pa to calculate the placement - read pa config from properties file
    config = RawConfigParser()
    config.read("../../sm/rooe/rooe.properties")
    pa_ip = config.get("PA", "pa.ip")
    pa_port = config.get("PA", "pa.port")
    pa_path = config.get("PA", "pa.path")
    pa_enable = config.get("PA", "pa.enable")
    placement_info = {}
    if pa_enable == "yes":
        pa_uri = "http://" + pa_ip + ":" + pa_port + pa_path
        # ask pa to calculate the placement - prepare the body
        paId = str(uuid4())
        pa_resources = parse_resources_for_pa(resources, vnfds_json.keys())
        body_pa = {
            "ReqId": paId,
            "nfvi": pa_resources,
            "nsd": extracted_info["nsd"],
            "callback":
            "http://localhost:8080/5gt/so/v1/__callbacks/pa/" + paId
        }
        log_queue.put(["INFO", "Body for PA is:"])
        log_queue.put(["INFO", dumps(body_pa, indent=4)])
        # ask pa to calculate the placement - do request
        header = {
            'Content-Type': 'application/json',
            'Accept': 'application/json'
        }
        log_queue.put(["INFO", "*****Time measure: ROE PA request generated"])
        try:
            conn = HTTPConnection(pa_ip, pa_port)
            conn.request("POST", pa_uri, dumps(body_pa), header)
            # ask pa to calculate the placement - read response and close connection
            rsp = conn.getresponse()
            placement_info = rsp.read().decode('utf-8')
            placement_info = loads(placement_info)
            conn.close()
        except ConnectionRefusedError:
            # the PA server is not running or the connection configuration is wrong
            log_queue.put([
                "ERROR",
                "the PA server is not running or the connection configuration is wrong"
            ])
        log_queue.put(["INFO", "output of the PA is: "])
        log_queue.put(["INFO", placement_info])
        placement_info = amending_pa_output(extracted_info["nsd"],
                                            placement_info)
        log_queue.put(["INFO", "*****Time measure: ROE PA calculation done"])
        log_queue.put(["INFO", "PA tuned output is:"])
        log_queue.put(["INFO", placement_info])
    else:
        # to be removed when PA code tested: static placement for testing purposes
        pa_responses = config.items("RESPONSE")
        for pa_response in pa_responses:
            if (nsd_json["nsd"]["nsdIdentifier"].lower().find(pa_response[0])
                    != -1):
                placement_info = json.loads(pa_response[1])
        log_queue.put(["INFO", "PA TUNED (manually) output is:"])
        log_queue.put(["DEBUG", placement_info])

    log_queue.put(
        ["DEBUG",
         "Service NameId is: %s" % nsd_json["nsd"]["nsdIdentifier"]])
    if nestedInfo:
        key = next(iter(nestedInfo))
        log_queue.put(["DEBUG", "the key of nestedInfo in ROOE is: %s" % key])
        if len(nestedInfo[key]) > 1:
            # nested from a consumer domain
            nsId_tmp = nsId
        else:
            # nested local
            nsId_tmp = nsId + '_' + next(iter(nestedInfo))
    else:
        nsId_tmp = nsId

    nsir_db.save_placement_info(nsId_tmp, placement_info)
    # ask cloudify/OSM to deploy vnfs
    coreMano = createWrapper()
    deployed_vnfs_info = {}
    deployed_vnfs_info = coreMano.instantiate_ns(nsId, nsd_json, vnfds_json,
                                                 request, placement_info,
                                                 resources, nestedInfo)
    log_queue.put(["INFO", "The deployed_vnfs_info"])
    log_queue.put(["INFO", dumps(deployed_vnfs_info, indent=4)])
    if (deployed_vnfs_info is not None) and ("sapInfo" in deployed_vnfs_info):
        log_queue.put([
            "INFO",
            "ROOE: updating nsi:%s sapInfo: %s" %
            (nsId, deployed_vnfs_info["sapInfo"])
        ])
        ns_db.save_sap_info(nsId, deployed_vnfs_info["sapInfo"])
        log_queue.put(["INFO", "*****Time measure: ROE created VNF's"])
    if deployed_vnfs_info is not None:
        # list of VLs to be deployed
        vls_info = extract_vls_info_mtp(resources, extracted_info,
                                        placement_info, nsId_tmp, nestedInfo)
        log_queue.put(["INFO", "*****Time measure: ROE extracted VL's at MTP"])
        # ask network execution engine to deploy the virtual links
        eenet.deploy_vls(vls_info, nsId_tmp)
    log_queue.put(["INFO", "*****Time measure: ROE created LL's at MTP"])

    # set operation status as SUCCESSFULLY_DONE
    if (nsId_tmp.find('_') == -1):
        # the service is single, I can update the operationId, and the status
        operationId = operation_db.get_operationId(nsId, "INSTANTIATION")
        if deployed_vnfs_info is not None:
            log_queue.put(["INFO", "NS Instantiation finished correctly"])
            operation_db.set_operation_status(operationId, "SUCCESSFULLY_DONE")
            # set ns status as INSTANTIATED
            ns_db.set_ns_status(nsId, "INSTANTIATED")
        else:
            log_queue.put(["ERROR", "NS Instantiation FAILED"])
            operation_db.set_operation_status(operationId, "FAILED")
            # set ns status as FAILED
            ns_db.set_ns_status(nsId, "FAILED")

    log_queue.put(["INFO", "INSTANTIATION FINISHED :)"])
Code example #60
import os
import os.path

from six.moves.configparser import RawConfigParser, NoOptionError

from trollsift import Parser, compose
from pytroll_collectors import trigger
from pytroll_collectors import region_collector
from posttroll import message, publisher
try:
    from satpy.resample import get_area_def
except ImportError:
    from mpop.projector import get_area_def

LOGGER = logging.getLogger(__name__)
CONFIG = RawConfigParser()
PUB = None


def get_metadata(fname):
    """Parse metadata from the file."""
    res = None
    for section in CONFIG.sections():
        try:
            parser = Parser(CONFIG.get(section, "pattern"))
        except NoOptionError:
            continue
        if not parser.validate(fname):
            continue
        res = parser.parse(fname)
        res.update(dict(CONFIG.items(section)))