Example 1
def set_up_logger_syslog_handler(config):
    handlers = {}
    formatters = {}
    if config.has_section('Logging'):
        if config.has_option('Logging', 'loghost') and \
            config.has_option('Logging', 'logport'):
            log_host = config.get('Logging', 'loghost')
            log_port = int(config.get('Logging', 'logport'))
            log_address = (log_host, log_port)
            formatters['syslog_formatter'] = {
                'format': '%(asctime)s %(name)s: %(levelname)s %(message)s',
                'datefmt': '%b %e %H:%M:%S',
            }
            socktype = socket.SOCK_DGRAM
            if config.has_option('Logging', 'logtcp'):
                if config.getboolean('Logging', 'logtcp'):
                    socktype = socket.SOCK_STREAM
                else:
                    socktype = socket.SOCK_DGRAM
            facility = logging.handlers.SysLogHandler.LOG_USER
            if config.has_option('Logging', 'logfacility'):
                try:
                    facility = logging.handlers.SysLogHandler.facility_names[
                        config.get('Logging', 'logfacility')]
                except KeyError:
                    raise Exception('Invalid "logfacility" value of "%s"' %
                        config.get('Logging', 'logfacility'))
            handlers['syslog'] = {
                'class': 'logging.handlers.SysLogHandler',
                'formatter': 'syslog_formatter',
                'address': log_address,
                'facility': facility,
                'socktype': socktype,
            }
    return handlers, formatters
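
The dicts returned above use dictConfig-style keys ('class', 'formatter'), so they are presumably meant for logging.config.dictConfig. A minimal wiring sketch; the [Logging] values and the surrounding dictConfig schema are assumptions, not part of the original:

import configparser
import logging.config

# Illustrative [Logging] values; the option names match the snippet above.
config = configparser.ConfigParser()
config.read_string("""
[Logging]
loghost = 127.0.0.1
logport = 514
logtcp = false
logfacility = daemon
""")

handlers, formatters = set_up_logger_syslog_handler(config)

logging.config.dictConfig({
    'version': 1,
    'disable_existing_loggers': False,
    'formatters': formatters,
    'handlers': handlers,
    # send everything through whichever handlers were built
    'root': {'level': 'INFO', 'handlers': list(handlers)},
})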
Example 2
def reset():
    global bin_paths
    global config
    global configs_found

    bin_paths = {}

    config = ConfigParser.ConfigParser()
    configs_found = config.read(config_files)
    if not configs_found:
        print ("WARNING: pyTivo.conf does not exist.\n" + "Assuming default values.")
        configs_found = config_files[-1:]

    for section in config.sections():
        if section.startswith("_tivo_"):
            tsn = section[6:]
            if tsn.upper() not in ["SD", "HD"]:
                if config.has_option(section, "name"):
                    tivo_names[tsn] = config.get(section, "name")
                else:
                    tivo_names[tsn] = tsn
                if config.has_option(section, "address"):
                    tivos[tsn] = config.get(section, "address")

    for section in ["Server", "_tivo_SD", "_tivo_HD"]:
        if not config.has_section(section):
            config.add_section(section)
Example 3
def perform_prestart_checks(expected_hostname):
  # Check if current hostname is equal to the expected one (obtained from the
  # server during bootstrap).
  global config

  if expected_hostname is not None:
    current_hostname = hostname.hostname(config)
    if current_hostname != expected_hostname:
      print("Determined hostname does not match expected. Please check agent "
            "log for details")
      msg = "Ambari agent machine hostname ({0}) does not match expected ambari " \
            "server hostname ({1}). Aborting registration. Please check hostname, " \
            "hostname -f and /etc/hosts file to confirm your " \
            "hostname is setup correctly".format(current_hostname, expected_hostname)
      logger.error(msg)
      sys.exit(1)
  # Check if there is another instance running
  if os.path.isfile(ProcessHelper.pidfile) and not OSCheck.get_os_family() == OSConst.WINSRV_FAMILY:
    print("%s already exists, exiting" % ProcessHelper.pidfile)
    sys.exit(1)
  # check if ambari prefix exists
  elif config.has_option('agent', 'prefix') and not os.path.isdir(os.path.abspath(config.get('agent', 'prefix'))):
    msg = "Ambari prefix dir %s does not exists, can't continue" \
          % config.get("agent", "prefix")
    logger.error(msg)
    print(msg)
    sys.exit(1)
  elif not config.has_option('agent', 'prefix'):
    msg = "Ambari prefix dir %s not configured, can't continue"
    logger.error(msg)
    print(msg)
    sys.exit(1)
Example 4
def get_mon_api_config(config):
    mon_api_config = {'is_enabled': False,
                      'url': '',
                      'project_name': '',
                      'username': '',
                      'password': False,
                      'use_keystone': True,
                      'keystone_url': '',
                      'dimensions': None}

    if config.has_option("Main", "dimensions"):
        # parse comma separated dimensions into a dimension list
        try:
            dim_list = [dim.split(':') for dim in config.get('Main', 'dimensions').split(',')]
            mon_api_config['dimensions'] = {key.strip(): value.strip() for key, value in dim_list}
        except ValueError:
            mon_api_config['dimensions'] = {}

    if config.has_section("Api"):
        options = {"url": config.get,
                   "project_name": config.get,
                   "username": config.get,
                   "password": config.get,
                   "use_keystone": config.getboolean,
                   "keystone_url": config.get}

        for name, func in options.iteritems():
            if config.has_option("Api", name):
                mon_api_config[name] = func("Api", name)

    return mon_api_config
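
The 'dimensions' option is a comma-separated list of key:value pairs. The same parsing as above, worked by hand on an illustrative value:

raw = 'service:monitoring, env:dev'   # illustrative [Main] dimensions string
dim_list = [dim.split(':') for dim in raw.split(',')]
dimensions = {key.strip(): value.strip() for key, value in dim_list}
# dimensions == {'service': 'monitoring', 'env': 'dev'}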
Example 5
def set_up_logger_file_handler(config):
    handlers = {}
    formatters = {}
    if config.has_section('Logging'):
        # create handler
        # add to logger
        # create formatter
        if config.has_option('Logging', 'logfile'):
            logfile = config.get('Logging', 'logfile')
            if config.has_option('Logging', 'loghistory'):
                loghistory = int(config.get('Logging', 'loghistory'))
            else:
                loghistory = 7
            formatters['file_formatter'] = {
                'format': "%(asctime)s - %(levelname)s - %(message)s",
            }
            handlers['file'] = {
                'class': 'logging.handlers.TimedRotatingFileHandler',
                'formatter': 'file_formatter',
                'filename': logfile,
                'when': 'd',
                'interval': 1,
                'backupCount': loghistory,
            }
    return handlers, formatters
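
A sketch of a [Logging] section exercising the options read above; the logfile and loghistory names come from the snippet, while the values and the use of Python 3's configparser are assumptions:

import configparser

config = configparser.ConfigParser()
config.read_string("""
[Logging]
logfile = /tmp/myapp/app.log
loghistory = 14
""")

handlers, formatters = set_up_logger_file_handler(config)
# handlers['file'] now describes a daily TimedRotatingFileHandler keeping 14 backups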
Example 6
def reset():
    global bin_paths
    global config
    global configs_found

    bin_paths = {}

    config = ConfigParser.ConfigParser()
    configs_found = config.read(config_files)
    if not configs_found:
        print ('WARNING: pyTivo.conf does not exist.\n' +
               'Assuming default values.')
        configs_found = config_files[-1:]

    for section in config.sections():
        if section.startswith('_tivo_'):
            tsn = section[6:]
            if tsn.upper() not in ['SD', 'HD']:
                if config.has_option(section, 'name'):
                    tivo_names[tsn] = config.get(section, 'name')
                else:
                    tivo_names[tsn] = tsn
                if config.has_option(section, 'address'):
                    tivos[tsn] = config.get(section, 'address')

    for section in ['Server', '_tivo_SD', '_tivo_HD']:
        if not config.has_section(section):
            config.add_section(section)
Example 7
def connect(config, echo=False):
    """Connect to a given Clusto datastore.

    Accepts a config object with (at least) a DSN string

    e.g. mysql://user:[email protected]/clustodb
    e.g. sqlite:///somefile.db

    @param config: the config object
    """

    dsn = config.get('clusto', 'dsn')
    if dsn.startswith('http'):
        SESSION.clusto_api = True
    else:
        SESSION.configure(bind=create_engine(dsn,
                                             echo=echo,
                                             poolclass=SingletonThreadPool,
                                             pool_recycle=600
                                             ))

    SESSION.clusto_version = None

    if config.has_option('clusto', 'versioning'):
        SESSION.clusto_versioning_enabled = config.getboolean('clusto', 'versioning')
    else:
        SESSION.clusto_versioning_enabled = False

    # Set the log level from config, default is WARNING
    if config.has_option('clusto', 'loglevel'):
        rootlog = logging.getLogger()
        level = logging.getLevelName(config.get('clusto', 'loglevel'))
        rootlog.setLevel(level)

    # Setup audit logging to a file
    if config.has_option('clusto', 'auditlog'):
        auditlog = logging.getLogger('clusto.audit')
        auditlog.propagate = False
        auditlog.setLevel(logging.INFO)
        logfile = config.get('clusto', 'auditlog')
        handler = logging.handlers.WatchedFileHandler(logfile)
        handler.setFormatter(logging.Formatter(
            '%(asctime)s %(name)s %(levelname)s %(message)s',
            '%Y-%m-%d %H:%M:%S'
        ))
        auditlog.addHandler(handler)

    try:
        memcache_servers = config.get('clusto', 'memcached').split(',')
#       Memcache should only be imported if we're actually using it, yes?
        import memcache
        logging.info('Memcache server list: %s' % config.get('clusto', 'memcached'))
        SESSION.memcache = memcache.Client(memcache_servers, debug=0)
    except:
        SESSION.memcache = None
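
A usage sketch for connect(); the [clusto] option names (dsn, versioning, loglevel) come from the function, while the DSN value and the use of Python 3's configparser are assumptions:

import configparser

cfg = configparser.ConfigParser()
cfg.read_dict({'clusto': {
    'dsn': 'sqlite:///somefile.db',   # same form as the docstring example
    'versioning': 'false',
    'loglevel': 'WARNING',
}})

connect(cfg, echo=False)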
Example 8
File: conf.py Project: seveas/ubot
 def __init__(self, config, section):
     self.is_active = False
     self.name = section
     if config.has_option(section, 'nickmatch'):
         self.nickmatch = re.compile(config.get(section, 'nickmatch'), re.I)
     else:
         raise ubot.exceptions.ConfigError("No nickmatch specified for %s" % section)
     if config.has_option(section, 'priority'):
         self.priority = int(config.get(section, 'priority'))
     else:
         raise ubot.exceptions.ConfigError("No priority specified for %s" % section)
Example 9
def setup_logging(config):
    if config.has_option('ircbot', 'log_config'):
        log_config = config.get('ircbot', 'log_config')
        fp = os.path.expanduser(log_config)
        if not os.path.exists(fp):
            raise Exception("Unable to read logging config file at %s" % fp)
        logging.config.fileConfig(fp)
    elif config.has_option('ircbot', 'log_file'):
        log_file = config.get('ircbot', 'log_file')
        logging.basicConfig(level=logging.DEBUG, filename=log_file)
    else:
        logging.basicConfig(level=logging.DEBUG)
Example 10
def get_logging_config(cfg_path=None):
    logging_config = {
        'log_level': None,
        'collector_log_file': '/var/log/datadog/collector.log',
        'forwarder_log_file': '/var/log/datadog/forwarder.log',
        'dogstatsd_log_file': '/var/log/datadog/dogstatsd.log',
        'pup_log_file': '/var/log/datadog/pup.log',
        'log_to_syslog': True,
        'syslog_host': None,
        'syslog_port': None,
    }

    config_path = get_config_path(cfg_path, os_name=getOS())
    config = ConfigParser.ConfigParser()
    config.readfp(skip_leading_wsp(open(config_path)))

    if config.has_section('handlers') or config.has_section('loggers') or config.has_section('formatters'):
        sys.stderr.write("Python logging config is no longer supported and will be ignored.\nTo configure logging, update the logging portion of 'datadog.conf' to match:\n  'https://github.com/DataDog/dd-agent/blob/master/datadog.conf.example'.\n")

    for option in logging_config:
        if config.has_option('Main', option):
            logging_config[option] = config.get('Main', option)

    levels = {
        'CRITICAL': logging.CRITICAL,
        'DEBUG': logging.DEBUG,
        'ERROR': logging.ERROR,
        'FATAL': logging.FATAL,
        'INFO': logging.INFO,
        'WARN': logging.WARN,
        'WARNING': logging.WARNING,
    }
    if config.has_option('Main', 'log_level'):
        logging_config['log_level'] = levels.get(config.get('Main', 'log_level'))

    if config.has_option('Main', 'log_to_syslog'):
        logging_config['log_to_syslog'] = config.get('Main', 'log_to_syslog').strip().lower() in ['yes', 'true', 1]

    if config.has_option('Main', 'syslog_host'):
        host = config.get('Main', 'syslog_host').strip()
        if host:
            logging_config['syslog_host'] = host
        else:
            logging_config['syslog_host'] = None

    if config.has_option('Main', 'syslog_port'):
        port = config.get('Main', 'syslog_port').strip()
        try:
            logging_config['syslog_port'] = int(port)
        except:
            logging_config['syslog_port'] = None

    return logging_config
Example 11
    def persist_runner(self):
        config = self.config_parser
        logger.debug("initializing backend %s" %
                     config.get('core', 'persist_backend'))
        persist_backend = getattr(
            persist, '%s_backend' % config.get('core', 'persist_backend'))

        persist_backend = persist_backend(config)

        spool = self.spool

        num_persist_workers = None
        if config.has_option('core', 'persist_workers'):
            num_persist_workers = abs(config.getint('core', 'persist_workers'))

        if not num_persist_workers:
            num_persist_workers = PERSIST_POOL_WORKERS

        self.persist_pool = pool = Pool(num_persist_workers)

        while 1:
            spool_file = None
            if self.stop_now:
                break
            pool.wait_available()
            try:
                spool_file = spool.get(5)
            except Empty:
                sleep(1)
                continue

            pool.spawn(self.persist_data, spool_file, persist_backend)

        pool.join()
Example 12
    def run(self):
        logger.debug("initializing spool")
        config = self.config_parser

        self.spool = spool = Spooler(config)

        num_gather_workers = None
        if config.has_option('core', 'gather_workers'):
            num_gather_workers = abs(config.getint('core', 'gather_workers'))

        if not num_gather_workers:
            num_gather_workers = GATHER_POOL_WORKERS

        self._gather_pool = pool = Pool(num_gather_workers)

        persist_queue = self.persist_queue

        self.collect_manager = CollectPlugins(
            base_class=Collect,
            config=config,
            init_args=(config, persist_queue, spool, pool),
            entry_points='arke_plugins',
        )

        self.collect_manager.load(pool=self._gather_pool)
        try:
            self.persist_runner()
        except KeyboardInterrupt:
            pass

        self.shutdown()
Example 13
def setup_logging(config=None):
    """Turn down dependent library log levels so they aren't noise."""
    global CONFIGURED
    FORMAT = '%(asctime)s  %(levelname)-8s [%(name)-15s] %(message)s'
    DATEFMT = '%Y-%m-%d %H:%M:%S'
    # set 3rd party library logging levels to sanity points
    loglevels = {
        "irc.client": logging.INFO,
        "gerrit.GerritWatcher": logging.INFO,
        "paramiko.transport": logging.INFO,
        "pyelasticsearch": logging.INFO,
        "requests.packages.urllib3.connectionpool": logging.WARN,
        "urllib3.connectionpool": logging.WARN
    }

    if config is not None and config.has_option('ircbot', 'log_config'):
        log_config = config.get('ircbot', 'log_config')
        fp = os.path.expanduser(log_config)
        if not os.path.exists(fp):
            raise Exception("Unable to read logging config file at %s" % fp)
        logging.config.fileConfig(fp)
    else:
        logging.basicConfig(
            level=logging.DEBUG,
            format=FORMAT,
            datefmt=DATEFMT
        )
        for module in loglevels:
            log = logging.getLogger(module)
            log.setLevel(loglevels[module])
    CONFIGURED = True
Example 14
    def register_schedule(self):
        channel = config.get('irc', 'channel')

        for schedule in config.getlist('irc', 'schedules'):
            sect = ':'.join(('schedule', schedule))

            # do not evaluate isenabled() here.
            # if it does, the disabled action will never be scheduled.
            if not config.has_section(sect):
                logging.error('[schedule] [%s] no such schedule', sect)
                continue

            if not config.has_option(sect, 'action'):
                logging.error('[schedule] [%s] no action specified', sect)
                continue

            action = ':'.join(('action', config.get(sect, 'action')))
            if not config.has_section(action):
                logging.error('[schedule] [%s] invalid action specified', sect)
                continue

            interval = config.getint(sect, 'interval')
            if interval < 60:
                logging.error('[schedule] [%s] interval too short', sect)
                continue

            self.reactor.register_schedule(interval, self.do_action,
                                           action, self.connection,
                                           None, {'target': channel}, sect)
            logging.info('[schedule] [%s] registered', sect)
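
The loop above implies an ini layout where each [schedule:<name>] section points at an [action:<name>] section and carries an interval of at least 60 seconds. A hypothetical fragment (config.getlist is a project-specific helper; every name and value below is illustrative):

sample = """
[irc]
channel = #ops
schedules = nightly-report

[schedule:nightly-report]
action = report
interval = 86400

[action:report]
; options for this action are read by do_action() elsewhere
"""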
Example 15
File: agent.py Project: dlobue/arke
    def persist_runner(self):
        config = self.config_parser
        logger.debug("initializing backend %s" % config.get('core', 'persist_backend'))
        persist_backend = getattr(persist, '%s_backend' %
                config.get('core', 'persist_backend'))

        persist_backend = persist_backend(config)

        spool = self.spool

        num_persist_workers = None
        if config.has_option('core', 'persist_workers'):
            num_persist_workers = abs(config.getint('core', 'persist_workers'))

        if not num_persist_workers:
            num_persist_workers = PERSIST_POOL_WORKERS

        self.persist_pool = pool = Pool(num_persist_workers)

        while 1:
            spool_file = None
            if self.stop_now:
                break
            pool.wait_available()
            try:
                spool_file = spool.get(5)
            except Empty:
                sleep(1)
                continue

            pool.spawn(self.persist_data, spool_file, persist_backend)

        pool.join()
Example 16
 def init_cache(self, config):
     self.cache_model = None
     if self.no_cache:
         return
     if not config.has_option('cache', 'table'):
         return
     cache_table = config.get('cache', 'table')
     if not cache_table:
         return
     logger.info("Init cache table `%s`", cache_table)
     self.cache_model = CacheTendersModel
     self.cache_model._meta.database = self.database
     self.cache_model._meta.db_table = cache_table
     self.cache_max_size = 0xfff0
     try:
         self.cache_model.select().count()
         cache_table_exists = True
     except:
         cache_table_exists = False
         self.database.rollback()
     if self.drop_cache and cache_table_exists:
         logger.warning("Drop cache table `%s`", cache_table)
         self.cache_model.drop_table()
         cache_table_exists = False
     if not cache_table_exists:
         logger.info("Create cache table `%s`", cache_table)
         self.cache_model.create_table()
Example 17
def get_bin(fname):
    global bin_paths

    logger = logging.getLogger('pyTivo.config')

    if fname in bin_paths:
        return bin_paths[fname]

    if config.has_option('Server', fname):
        fpath = config.get('Server', fname)
        if os.path.exists(fpath) and os.path.isfile(fpath):
            bin_paths[fname] = fpath
            return fpath
        else:
            logger.error('Bad %s path: %s' % (fname, fpath))

    if sys.platform == 'win32':
        fext = '.exe'
    else:
        fext = ''

    for path in ([os.path.join(os.path.dirname(__file__), 'bin')] +
                 os.getenv('PATH').split(os.pathsep)):
        fpath = os.path.join(path, fname + fext)
        if os.path.exists(fpath) and os.path.isfile(fpath):
            bin_paths[fname] = fpath
            return fpath

    logger.warn('%s not found' % fname)
    return None
Example 18
def getShares(tsn=''):
    shares = [(section, Bdict(config.items(section)))
              for section in config.sections()
              if not (section.startswith(special_section_prefixes)
                      or section in special_section_names)
             ]

    tsnsect = '_tivo_' + tsn
    if config.has_section(tsnsect) and config.has_option(tsnsect, 'shares'):
        # clean up leading and trailing spaces & make sure ref is valid
        tsnshares = []
        for x in config.get(tsnsect, 'shares').split(','):
            y = x.strip()
            if config.has_section(y):
                tsnshares.append((y, Bdict(config.items(y))))
        shares = tsnshares

    shares.sort()

    if not config.getboolean('Server', 'nosettings', fallback=False):
        shares.append(('Settings', {'type': 'settings'}))
    if get_server('tivo_mak') and get_togo('path'):
        shares.append(('ToGo', {'type': 'togo'}))

    return shares
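
A hypothetical pyTivo.conf fragment illustrating the per-TSN filtering above: the '_tivo_<TSN>' section's shares option restricts that TiVo to the named share sections (section names, paths and values are illustrative):

sample = """
[MyMovies]
type = video
path = /media/movies

[MyMusic]
type = music
path = /media/music

[_tivo_652000180000000]
shares = MyMovies, MyMusic
"""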
Example 19
def getShares(tsn=''):
    shares = [(section, Bdict(config.items(section)))
              for section in config.sections()
              if not (section.startswith(('_tivo_', 'logger_', 'handler_',
                                          'formatter_'))
                      or section in ('Server', 'loggers', 'handlers',
                                     'formatters')
              )
    ]

    tsnsect = '_tivo_' + tsn
    if config.has_section(tsnsect) and config.has_option(tsnsect, 'shares'):
        # clean up leading and trailing spaces & make sure ref is valid
        tsnshares = []
        for x in config.get(tsnsect, 'shares').split(','):
            y = x.strip()
            if config.has_section(y):
                tsnshares.append((y, Bdict(config.items(y))))
        shares = tsnshares

    shares.sort()

    if get_server('nosettings', 'false').lower() in ['false', 'no', 'off']:
        shares.append(('Settings', {'type': 'settings'}))
    if get_server('tivo_mak') and get_server('togo_path'):    
        shares.append(('ToGo', {'type': 'togo'}))

    return shares
Example 20
File: agent.py Project: dlobue/arke
    def run(self):
        logger.debug("initializing spool")
        config = self.config_parser

        self.spool = spool = Spooler(config)

        num_gather_workers = None
        if config.has_option('core', 'gather_workers'):
            num_gather_workers = abs(config.getint('core', 'gather_workers'))

        if not num_gather_workers:
            num_gather_workers = GATHER_POOL_WORKERS

        self._gather_pool = pool = Pool(num_gather_workers)

        persist_queue = self.persist_queue

        self.collect_manager = CollectPlugins(base_class=Collect,
                                              config=config,
                                              init_args=(config, persist_queue, spool, pool),
                                             entry_points='arke_plugins',
                                            )

        self.collect_manager.load(pool=self._gather_pool)
        try:
            self.persist_runner()
        except KeyboardInterrupt:
            pass

        self.shutdown()
Example 21
def get_bin(fname):
    global bin_paths

    logger = logging.getLogger('pyTivo.config')

    if fname in bin_paths:
        return bin_paths[fname]

    if config.has_option('Server', fname):
        fpath = config.get('Server', fname)
        if os.path.exists(fpath) and os.path.isfile(fpath):
            bin_paths[fname] = fpath
            return fpath
        else:
            logger.error('Bad %s path: %s' % (fname, fpath))

    if sys.platform == 'win32':
        fext = '.exe'
    else:
        fext = ''

    for path in ([os.path.join(os.path.dirname(__file__), 'bin')] +
                 os.getenv('PATH').split(os.pathsep)):
        fpath = os.path.join(path, fname + fext)
        if os.path.exists(fpath) and os.path.isfile(fpath):
            bin_paths[fname] = fpath
            return fpath

    logger.warn('%s not found' % fname)
    return None
Example 22
def extract_agent_config(config):
    # get merged into the real agentConfig
    agentConfig = {}

    backend = config.get('Main', 'service_discovery_backend')
    agentConfig['service_discovery'] = True

    conf_backend = None
    if config.has_option('Main', 'sd_config_backend'):
        conf_backend = config.get('Main', 'sd_config_backend')

    if backend not in SD_BACKENDS:
        log.error(
            "The backend {0} is not supported. Service discovery won't be enabled."
            .format(backend))
        agentConfig['service_discovery'] = False

    if conf_backend is None:
        log.warning(
            'No configuration backend provided for service discovery. Only auto config templates will be used.'
        )
    elif conf_backend not in SD_CONFIG_BACKENDS:
        log.error(
            "The config backend {0} is not supported. Only auto config templates will be used."
            .format(conf_backend))
        conf_backend = None
    agentConfig['sd_config_backend'] = conf_backend

    additional_config = extract_sd_config(config)
    agentConfig.update(additional_config)
    return agentConfig
Example 23
def parse_channel_config_file(file_path):
    config = ConfigParser()
    with codecs.open(file_path, 'r', encoding='utf-8') as f:
        config.readfp(f)

    channel_list = []

    # validate and parse the config file
    if not config.sections():
        print >> sys.stderr, "no section in config file!"
        sys.exit(1)
    for section in config.sections():
        channel_info = {}

        OPTIONS = (u'channel_name', u'channel_description', u'rss_url')
        for option in OPTIONS:
            if not config.has_option(section, option):
                print >> sys.stderr, "no option [%s] in section [%s]!" % (
                    option, section)
                sys.exit(1)

            channel_info[option] = config.get(section, option)

        channel_list.append(channel_info)

    return channel_list
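
A usage sketch for the parser above; the required option names (channel_name, channel_description, rss_url) come from the snippet, while the file name, section and values are illustrative:

import codecs

sample = u"""
[python-news]
channel_name = Python News
channel_description = Aggregated Python headlines
rss_url = https://example.org/python-news/rss.xml
"""

with codecs.open('channels.ini', 'w', encoding='utf-8') as f:
    f.write(sample)

channel_list = parse_channel_config_file('channels.ini')
# -> [{u'channel_name': u'Python News', ...}]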
Example 24
def getShares(tsn=''):
    shares = [(section, Bdict(config.items(section)))
              for section in config.sections()
              if not (section.startswith(('_tivo_', 'logger_', 'handler_',
                                          'formatter_'))
                      or section in ('Server', 'loggers', 'handlers',
                                     'formatters')
              )
    ]

    tsnsect = '_tivo_' + tsn
    if config.has_section(tsnsect) and config.has_option(tsnsect, 'shares'):
        # clean up leading and trailing spaces & make sure ref is valid
        tsnshares = []
        for x in config.get(tsnsect, 'shares').split(','):
            y = x.strip()
            if config.has_section(y):
                tsnshares.append((y, Bdict(config.items(y))))
        shares = tsnshares

    shares.sort()

    if get_server('nosettings', 'false').lower() in ['false', 'no', 'off']:
        shares.append(('Settings', {'type': 'settings'}))
    if get_server('tivo_mak') and get_server('togo_path'):    
        shares.append(('ToGo', {'type': 'togo'}))

    return shares
Example 25
def main(loop: Optional[asyncio.AbstractEventLoop] = None) -> int:
    parser = argparse.ArgumentParser(
        description="PyPI PEP 381 mirroring client.")
    parser.add_argument("--version",
                        action="version",
                        version=f"%(prog)s {bandersnatch.__version__}")
    parser.add_argument(
        "-c",
        "--config",
        default="/etc/bandersnatch.conf",
        help="use configuration file (default: %(default)s)",
    )
    parser.add_argument(
        "--debug",
        action="store_true",
        default=False,
        help="Turn on extra logging (DEBUG level)",
    )

    subparsers = parser.add_subparsers()
    _delete_parser(subparsers)
    _mirror_parser(subparsers)
    _verify_parser(subparsers)

    if len(sys.argv) < 2:
        parser.print_help()
        parser.exit()

    args = parser.parse_args()

    bandersnatch.log.setup_logging(args)

    # Prepare default config file if needed.
    config_path = Path(args.config)
    if not config_path.exists():
        logger.warning(
            f"Config file '{args.config}' missing, creating default config.")
        logger.warning(
            "Please review the config file, then run 'bandersnatch' again.")

        default_config_path = Path(__file__).parent / "default.conf"
        try:
            shutil.copy(default_config_path, args.config)
        except IOError as e:
            logger.error(f"Could not create config file: {e}")
        return 1

    config = bandersnatch.configuration.BandersnatchConfig(
        config_file=args.config).config

    if config.has_option("mirror", "log-config"):
        logging.config.fileConfig(str(Path(config.get("mirror",
                                                      "log-config"))))

    # TODO: Go to asyncio.run() when >= 3.7
    loop = loop or asyncio.get_event_loop()
    try:
        return loop.run_until_complete(async_main(args, config))
    finally:
        loop.close()
Example 26
 def init_cache(self, config):
     self.cache_model = None
     if self.no_cache or self.client_config['resume']:
         return
     if not config.has_option('cache', 'table'):
         return
     cache_table = config.get('cache', 'table')
     if not cache_table:
         return
     logger.info("Init cache table `%s`", cache_table)
     self.cache_model = CacheTendersModel
     self.cache_model._meta.database = self.database
     self.cache_model._meta.db_table = cache_table
     self.cache_max_size = 0xfff0
     self.cache_hit_count = 0
     self.cache_miss_count = 0
     try:
         self.cache_model.select().get()
         cache_table_exists = True
     except CacheTendersModel.DoesNotExist:
         cache_table_exists = True
     except peewee.DatabaseError:
         cache_table_exists = False
         self.database.rollback()
     if self.drop_cache and cache_table_exists:
         logger.warning("Drop cache table `%s`", cache_table)
         self.cache_model.drop_table()
         cache_table_exists = False
     if not cache_table_exists:
         logger.info("Create cache table `%s`", cache_table)
         self.cache_model.create_table()
Example 27
def get_bin(fname):
    global bin_paths

    logger = logging.getLogger("pyTivo.config")

    if fname in bin_paths:
        return bin_paths[fname]

    if config.has_option("Server", fname):
        fpath = config.get("Server", fname)
        if os.path.exists(fpath) and os.path.isfile(fpath):
            bin_paths[fname] = fpath
            return fpath
        else:
            logger.error("Bad %s path: %s" % (fname, fpath))

    if sys.platform == "win32":
        fext = ".exe"
    else:
        fext = ""

    for path in [os.path.join(os.path.dirname(__file__), "bin")] + os.getenv("PATH").split(os.pathsep):
        fpath = os.path.join(path, fname + fext)
        if os.path.exists(fpath) and os.path.isfile(fpath):
            bin_paths[fname] = fpath
            return fpath

    logger.warn("%s not found" % fname)
    return None
Example 28
def main(loop: Optional[asyncio.AbstractEventLoop] = None) -> int:
    parser = _make_parser()
    if len(sys.argv) < 2:
        parser.print_help()
        parser.exit()

    args = parser.parse_args()

    bandersnatch.log.setup_logging(args)

    # Prepare default config file if needed.
    config_path = Path(args.config)
    if not config_path.exists():
        logger.warning(
            f"Config file '{args.config}' missing, creating default config.")
        logger.warning(
            "Please review the config file, then run 'bandersnatch' again.")

        default_config_path = Path(__file__).parent / "default.conf"
        try:
            shutil.copy(default_config_path, args.config)
        except OSError as e:
            logger.error(f"Could not create config file: {e}")
        return 1

    config = bandersnatch.configuration.BandersnatchConfig(
        config_file=args.config).config

    if config.has_option("mirror", "log-config"):
        logging.config.fileConfig(str(Path(config.get("mirror",
                                                      "log-config"))))

    if loop:
        loop.set_debug(args.debug)
    return asyncio.run(async_main(args, config))
Example 29
def extract_agent_config(config):
    # get merged into the real agentConfig
    agentConfig = {}

    backend = config.get('Main', 'service_discovery_backend')
    agentConfig['service_discovery'] = True

    conf_backend = None
    if config.has_option('Main', 'sd_config_backend'):
        conf_backend = config.get('Main', 'sd_config_backend')

    if backend not in SD_BACKENDS:
        log.error("The backend {0} is not supported. "
                  "Service discovery won't be enabled.".format(backend))
        agentConfig['service_discovery'] = False

    if conf_backend is None:
        log.warning('No configuration backend provided for service discovery. '
                    'Only auto config templates will be used.')
    elif conf_backend not in SD_CONFIG_BACKENDS:
        log.error("The config backend {0} is not supported. "
                  "Only auto config templates will be used.".format(conf_backend))
        conf_backend = None
    agentConfig['sd_config_backend'] = conf_backend

    additional_config = extract_sd_config(config)
    agentConfig.update(additional_config)
    return agentConfig
Example 30
def getShares(tsn=""):
    shares = [
        (section, dict(config.items(section)))
        for section in config.sections()
        if not (
            section.startswith("_tivo_")
            or section.startswith("logger_")
            or section.startswith("handler_")
            or section.startswith("formatter_")
            or section in ("Server", "loggers", "handlers", "formatters")
        )
    ]

    tsnsect = "_tivo_" + tsn
    if config.has_section(tsnsect) and config.has_option(tsnsect, "shares"):
        # clean up leading and trailing spaces & make sure ref is valid
        tsnshares = []
        for x in config.get(tsnsect, "shares").split(","):
            y = x.strip()
            if config.has_section(y):
                tsnshares.append((y, dict(config.items(y))))
        shares = tsnshares

    shares.sort()

    if get_server("nosettings", "false").lower() in ["false", "no", "off"]:
        shares.append(("Settings", {"type": "settings"}))
    if get_server("tivo_mak") and get_server("togo_path"):
        shares.append(("ToGo", {"type": "togo"}))

    return shares
Example 31
def alt(section, name, default, local_val=None):
    """Provide mechanism to alternate between config, local override 
    and default."""
    if local_val is not None:
        return local_val
    if config.has_option(section, name):
        return config.get(section, name)
    return default
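
A quick illustration of the precedence above (explicit local override first, then the config file, then the default); the section and option names are illustrative:

port = alt('Server', 'port', default='9032', local_val='8080')   # -> '8080'
port = alt('Server', 'port', default='9032')                     # -> config value, else '9032'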
Example 32
def get_togo(name, default=None):
    if config.has_option('togo', name):
        return config.get('togo', name)

    # many togo options used to be in the server section with
    # the name prefixed w/ 'togo_', so check for those values
    # before returning the default
    return get_server('togo_{}'.format(name), default)
Example 33
def get_mon_api_config(config):
    mon_api_config = {'is_enabled': False,
                      'url': '',
                      'project_name': '',
                      'project_id': '',
                      'project_domain_name': '',
                      'project_domain_id': '',
                      'ca_file': '',
                      'insecure': '',
                      'username': '',
                      'password': '',
                      'use_keystone': True,
                      'keystone_url': '',
                      'dimensions': None,
                      'max_buffer_size': 1000,
                      'backlog_send_rate': 5}

    if config.has_option("Main", "dimensions"):
        # parse comma separated dimensions into a dimension list
        try:
            dim_list = [dim.split(':') for dim in config.get('Main', 'dimensions').split(',')]
            mon_api_config['dimensions'] = {key.strip(): value.strip() for key, value in dim_list}
        except ValueError:
            mon_api_config['dimensions'] = {}

    if config.has_section("Api"):
        options = {"url": config.get,
                   "project_name": config.get,
                   "project_id": config.get,
                   "project_domain_name": config.get,
                   "project_domain_id": config.get,
                   "ca_file": config.get,
                   "insecure": config.get,
                   "username": config.get,
                   "password": config.get,
                   "use_keystone": config.getboolean,
                   "keystone_url": config.get,
                   "max_buffer_size": config.getint,
                   "backlog_send_rate": config.getint,
                   "amplifier": config.getint}

        for name, func in options.iteritems():
            if config.has_option("Api", name):
                mon_api_config[name] = func("Api", name)

    return mon_api_config
Example 34
def setup_logging(config):
    if config.has_option('ircbot', 'log_config'):
        log_config = config.get('ircbot', 'log_config')
        fp = os.path.expanduser(log_config)
        if not os.path.exists(fp):
            raise Exception("Unable to read logging config file at %s" % fp)
        logging.config.fileConfig(fp)
    else:
        logging.basicConfig(level=logging.DEBUG)
Example 35
def extract_sd_config(config):
    """Extract configuration about service discovery for the agent"""
    sd_config = {}
    if config.has_option('Main', 'sd_config_backend'):
        sd_config['sd_config_backend'] = config.get('Main', 'sd_config_backend')
    else:
        sd_config['sd_config_backend'] = None
    if config.has_option('Main', 'sd_template_dir'):
        sd_config['sd_template_dir'] = config.get('Main', 'sd_template_dir')
    else:
        sd_config['sd_template_dir'] = SD_TEMPLATE_DIR
    if config.has_option('Main', 'sd_backend_host'):
        sd_config['sd_backend_host'] = config.get('Main', 'sd_backend_host')
    if config.has_option('Main', 'sd_backend_port'):
        sd_config['sd_backend_port'] = config.get('Main', 'sd_backend_port')
    if config.has_option('Main', 'sd_jmx_enable'):
        sd_config['sd_jmx_enable'] = config.get('Main', 'sd_jmx_enable')
    return sd_config
Example 36
def create_logdirs(configfile):
    config = SafeConfigParser()
    config.read(configfile)

    if config.has_option('handlers', 'keys'):
        handlers = config.get('handlers', 'keys').split(',')
        for handler in handlers:
            section = 'handler_' + handler
            if config.has_option(section, 'class') and config.get(section, 'class') in ('FileHandler', 'handlers.RotatingFileHandler', 'handlers.TimedRotatingFileHandler'):
                if config.has_option(section, 'args'):
                    try:
                        args = eval(config.get(section, 'args'))
                    except Exception:
                        continue
                    if isinstance(args, tuple) and len(args) > 0:
                        dir = dirname(args[0])
                        if not exists(dir):
                            makedirs(dir)
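
A sketch of a fileConfig-style ini that create_logdirs() would act on: the first element of each file handler's args tuple is the log path whose directory gets created (handler name and path are illustrative):

sample = """
[handlers]
keys = rotating

[handler_rotating]
class = handlers.TimedRotatingFileHandler
args = ('/tmp/myapp/logs/app.log', 'midnight', 1, 7)
"""

with open('logging.ini', 'w') as f:
    f.write(sample)

create_logdirs('logging.ini')   # ensures /tmp/myapp/logs exists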
Example 37
def getConfiguration(cfgfile=None, config_required={'Main': {'key1': 'value1', 'key2': 'value2'}}):
    '''
    read an ini configuration file and return a dictionary of key/value pairs
    update configuration file if missing any sections
    accepts: 
        cfgfile - path to configuration file
        config_required - nested dictionary in the following format:
        {'Section1':
            {'key1': 'value1', 'key2': 'value2'},
            
         'Section 2':
            {'key1': 'value1'}
        }
    '''
    if not cfgfile:
        raise ValueError('no configuration file specified')
    # required configuration options
    # Section: {'option': 'default value'}
    logger = logging.getLogger(__name__)
    logger.debug('getting configuration from file: {}'.format(cfgfile))
    cfgpath = os.path.dirname(cfgfile)
#     config_required = {
#         'Main': {'credentials': os.path.join(cfgpath, 'credentials/'), 
#                  },
#         }

    config = configuration.get_config(cfgfile)

    update_config = False

    logger.debug('checking sections')
    for section, values in list(config_required.items()):
        if not config.has_section(section):
            logger.warning('section: {} not found in {}'.format(section, cfgfile))
            logger.debug('adding section {}'.format(section))
            config.add_section(section)
            update_config = True
        for option, value in list(values.items()):
            if not config.has_option(section, option):
                logger.warning('option: {} not found in {}'.format(option, cfgfile))
                logger.debug('adding option {}: {}'.format(option, value))

                config.set(section, option, value)
                update_config = True


    # for section, options in config_required.items():

    if update_config:
        try:
            logger.debug('updating configuration file at: {}'.format(cfgfile))
            configuration.create_config(cfgfile, config)
        except Exception as e:
            logger.error(e)
            
    return(config)
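
A hypothetical call matching the documented config_required shape; the path and values are illustrative:

config = getConfiguration(
    cfgfile='/tmp/myapp.ini',
    config_required={
        'Main': {'credentials': '/tmp/myapp/credentials/'},
        'Logging': {'loglevel': 'INFO'},
    })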
Example 38
def getIsExternal(tsn):
    tsnsect = '_tivo_' + tsn
    if tsnsect in config.sections():
        if config.has_option(tsnsect, 'external'):
            try:
                return config.getboolean(tsnsect, 'external')
            except ValueError:
                pass

    return False
Example 39
def getIsExternal(tsn):
    tsnsect = "_tivo_" + tsn
    if tsnsect in config.sections():
        if config.has_option(tsnsect, "external"):
            try:
                return config.getboolean(tsnsect, "external")
            except ValueError:
                pass

    return False
Example 40
def add_syslog_handler(logger):
    
  syslog_enabled = config.has_option("logging","syslog_enabled") and (int(config.get("logging","syslog_enabled")) == 1)
      
  #add syslog handler if we are on linux and syslog is enabled in ambari config
  if syslog_enabled and IS_LINUX:
    logger.info("Adding syslog handler to ambari agent logger")
    syslog_handler = SysLogHandler(address="/dev/log",
                                   facility=SysLogHandler.LOG_LOCAL1)
        
    syslog_handler.setFormatter(SYSLOG_FORMATTER)
    logger.addHandler(syslog_handler)
Example 41
def create_logdirs(configfile):
    config = SafeConfigParser()
    config.read(configfile)

    if config.has_option('handlers', 'keys'):
        handlers = config.get('handlers', 'keys').split(',')
        for handler in handlers:
            section = 'handler_' + handler
            if config.has_option(section, 'class') and config.get(
                    section,
                    'class') in ('FileHandler', 'handlers.RotatingFileHandler',
                                 'handlers.TimedRotatingFileHandler'):
                if config.has_option(section, 'args'):
                    try:
                        args = eval(config.get(section, 'args'))
                    except Exception:
                        continue
                    if isinstance(args, tuple) and len(args) > 0:
                        dir = dirname(args[0])
                        if not exists(dir):
                            makedirs(dir)
Example 42
def add_syslog_handler(logger):
    
  syslog_enabled = config.has_option("logging","syslog_enabled") and (int(config.get("logging","syslog_enabled")) == 1)
      
  #add syslog handler if we are on linux and syslog is enabled in ambari config
  if syslog_enabled and IS_LINUX:
    logger.info("Adding syslog handler to ambari agent logger")
    syslog_handler = SysLogHandler(address="/dev/log",
                                   facility=SysLogHandler.LOG_LOCAL1)
        
    syslog_handler.setFormatter(SYSLOG_FORMATTER)
    logger.addHandler(syslog_handler)
Example 43
def generate_certificate(dir_name, file_name, key_length, key_type="rsa"):
    """Create self-signed cert and key files

        Args:
            dir_name: name of the directory to store the files
            file_name: name of the files that will be created. It will append
                .crt to certificate file and .key to key file
            key_length: key length in bits
            key_type: crypto type: RSA or DSA; defaults to RSA
        Returns:
            Nothing
        Exceptions:
            Raise exceptions on error
    """

    config = globals()['config']
    private_key = OpenSSL.crypto.PKey()
    if key_type == "rsa":
        private_key.generate_key(OpenSSL.crypto.TYPE_RSA, key_length)
    elif key_type == "dsa":
        private_key.generate_key(OpenSSL.crypto.TYPE_DSA, key_length)
    else:
        message = "Invalid key_type"
        logging.error(message)
        raise OneViewRedfishError(message)

    if not config.has_option("ssl-cert-defaults", "commonName"):
        config["ssl-cert-defaults"]["commonName"] = get_ip()

    cert = OpenSSL.crypto.X509()
    cert_subject = cert.get_subject()

    cert_defaults = dict(config.items("ssl-cert-defaults"))

    for key, value in cert_defaults.items():
        setattr(cert_subject, key, value)

    cert.set_serial_number(1)
    cert.gmtime_adj_notBefore(0)
    cert.gmtime_adj_notAfter(10 * 365 * 24 * 60 * 60)
    cert.set_issuer(cert.get_subject())
    cert.set_pubkey(private_key)
    cert.sign(private_key, "sha1")

    # Save Files
    with open(os.path.join(dir_name, file_name + ".crt"), "wt") as f:
        f.write(
            OpenSSL.crypto.dump_certificate(OpenSSL.crypto.FILETYPE_PEM,
                                            cert).decode("UTF-8"))
    with open(os.path.join(dir_name, file_name + ".key"), "wt") as f:
        f.write(
            OpenSSL.crypto.dump_privatekey(OpenSSL.crypto.FILETYPE_PEM,
                                           private_key).decode("UTF-8"))
Example 44
    def read_config(self):
        if self.config:
            return

        config = ConfigParser.RawConfigParser()
        config.read(self.config_file)

        for s in config.sections():
            if s == 'global':
                if config.has_option(s, 'poll_interval'):
                    self.poll_interval = int(config.get(s, 'poll_interval'))
                if config.has_option(s, 'newrelic_license_key'):
                    self.license_key = config.get(s, 'newrelic_license_key')
                continue
            if not config.has_option(s, 'name') or not config.has_option(
                    s, 'url'):
                continue
            ns = NginxApiCollector(s, config.get(s, 'name'),
                                   config.get(s, 'url'), self.poll_interval)
            if config.has_option(s, 'http_user') and config.has_option(
                    s, 'http_pass'):
                ns.basic_auth = base64.b64encode(
                    config.get(s, 'http_user') + b':' +
                    config.get(s, 'http_pass'))
            self.sources.append(ns)
        self.config = config
Example 45
def extract_sd_config(config):
    """Extract configuration about service discovery for the agent"""
    sd_config = {}
    if config.has_option('Main', 'sd_config_backend'):
        sd_config['sd_config_backend'] = config.get('Main', 'sd_config_backend')
    else:
        sd_config['sd_config_backend'] = None
    if config.has_option('Main', 'sd_template_dir'):
        sd_config['sd_template_dir'] = config.get(
            'Main', 'sd_template_dir')
    else:
        sd_config['sd_template_dir'] = SD_TEMPLATE_DIR
    if config.has_option('Main', 'sd_backend_host'):
        sd_config['sd_backend_host'] = config.get(
            'Main', 'sd_backend_host')
    if config.has_option('Main', 'sd_backend_port'):
        sd_config['sd_backend_port'] = config.get(
            'Main', 'sd_backend_port')
    if config.has_option('Main', 'sd_jmx_enable'):
        sd_config['sd_jmx_enable'] = config.get(
            'Main', 'sd_jmx_enable')
    return sd_config
Example 46
def perform_prestart_checks(expected_hostname):
    # Check if current hostname is equal to the expected one (obtained from the
    # server during bootstrap).
    global config

    if expected_hostname is not None:
        current_hostname = hostname.hostname(config)
        if current_hostname != expected_hostname:
            print(
                "Determined hostname does not match expected. Please check agent "
                "log for details")
            msg = "Ambari agent machine hostname ({0}) does not match expected ambari " \
                  "server hostname ({1}). Aborting registration. Please check hostname, " \
                  "hostname -f and /etc/hosts file to confirm your " \
                  "hostname is setup correctly".format(current_hostname, expected_hostname)
            logger.error(msg)
            sys.exit(1)
    # Check if there is another instance running
    if os.path.isfile(
            ProcessHelper.pidfile
    ) and not OSCheck.get_os_family() == OSConst.WINSRV_FAMILY:
        print("%s already exists, exiting" % ProcessHelper.pidfile)
        sys.exit(1)
    # check if ambari prefix exists
    elif config.has_option('agent', 'prefix') and not os.path.isdir(
            os.path.abspath(config.get('agent', 'prefix'))):
        msg = "Ambari prefix dir %s does not exists, can't continue" \
              % config.get("agent", "prefix")
        logger.error(msg)
        print(msg)
        sys.exit(1)
    elif not config.has_option('agent', 'prefix'):
        msg = "Ambari prefix dir %s not configured, can't continue"
        logger.error(msg)
        print(msg)
        sys.exit(1)

    check_sudo()
Example 47
    def stop(self):
        """ Stops all running services and itself """

        for key in self.services:
            service = self.services[key]
            if not config.has_option("Sockets", "keyinput") or service.socketName != config.get("Sockets", "keyinput"):
                try:
                    service.service.running = False
                except: 
                    self.logger.debug(traceback.format_exc())
                self.logger.info("Terminating socket: " + service.socketName)
                service.socket.send_json({"head" : "system_message" , "data" : "stop"})
        
        self.running = False
Example 48
def get_zc():
    opt = get_server('zeroconf', 'auto').lower()

    if opt == 'auto':
        for section in config.sections():
            if section.startswith('_tivo_'):
                if config.has_option(section, 'shares'):
                    logger = logging.getLogger('pyTivo.config')
                    logger.info('Shares security in use -- zeroconf disabled')
                    return False
    elif opt in ['false', 'no', 'off']:
        return False

    return True
Example 49
  def read_settings(self):
    ''' Reads the settings from the consul.ini file '''
    config = ConfigParser.SafeConfigParser()
    config.read(os.path.dirname(os.path.realpath(__file__)) + '/consul.ini')

    config_options = ['host', 'token', 'datacenter', 'servers_suffix',
                      'tags', 'kv_metadata', 'kv_groups', 'availability',
                      'unavailable_suffix', 'available_suffix', 'url',
                      'domain']
    for option in config_options:
      value = None
      if config.has_option('consul', option):
          value = config.get('consul', option)
      setattr(self, option, value)
Example 50
  def read_settings(self):
    ''' Reads the settings from the consul.ini file '''
    config = ConfigParser.SafeConfigParser()
    config.read(os.path.dirname(os.path.realpath(__file__)) + '/consul.ini')

    config_options = ['host', 'token', 'datacenter', 'servers_suffix',
                      'tags', 'kv_metadata', 'kv_groups', 'availability',
                      'unavailable_suffix', 'available_suffix', 'url',
                      'domain']
    for option in config_options:
      value = None
      if config.has_option('consul', option):
          value = config.get('consul', option)
      setattr(self, option, value)
Example 51
def parse_config_for_client_initiated(args):
    """
    Parse connector configuration and generate a result in dictionary
    format which includes integration names and the required info for
    pulling out data
    """
    connectors = {}
    try:
        config = SpecialConfigParser()
        if not os.path.isfile(args.ini):
            # The config file does not yet exist, so generate it.
            # generate_ini_file(args)
            raise ConfigError("Error: unable to open ini file: %r" % args.ini)

        config.read(args.ini)
        for section in config.sections():
            if section == 'converters':
                for name, filter_str in config.items('converters'):
                    DynamicConverter(name, filter_str)
            elif section == 'oomnitza' or config.has_option(
                    section, 'enable') and config.getboolean(
                        section, 'enable'):
                if not connectors and section != 'oomnitza':
                    raise ConfigError(
                        "Error: [oomnitza] must be the first section in the ini file."
                    )

                cfg = init_connector_from_configuration(
                    section, config.items(section), args)
                connectors[section] = cfg
            else:
                LOG.debug("Skipping connector '%s' as it is not enabled.",
                          section)
                pass
    except IOError:
        raise ConfigError("Could not open config file.")

    if len(connectors) <= 1:
        raise ConfigError("No connectors have been enabled.")

    if args.show_mappings:
        for name, connector in connectors.items():
            if name == 'oomnitza':
                continue
            print(connector["__connector__"].section, "Mappings")
            pprint.pprint(connector["__connector__"].field_mappings)
        exit(0)

    return connectors
Example 52
def user_update(username):
    if not config: raise ValueError('Configuration not loaded')
    if not config.has_section('users'):
        config.add_section('users')
    if config.has_option('users', username):
        print 'Changing password for %s' % username
    else:
        print 'Adding new user %s' % username
    password = getpass.getpass('Password: ')
    # NOTE: the rest of this block is masked in the source listing;
    # update_user_password() is a hypothetical stand-in for the masked update/check.
    if update_user_password(username, password):
        print ' * Updating OFTG-Ninja config ...'
        return True
    else:
        print ' * Failed to update user'
        return False
Example 53
def save_attachment(config, address, attachment):
    sandbox = config.get('DEFAULT', 'sandbox')

    code = config.get(address, 'code')

    seq = 1
    if config.has_option(address, 'sequence'):
        seq = config.getint(address, 'sequence') + 1

    filename = os.path.join(sandbox, '{}_{}.xlsx'.format(code, seq))

    with open(filename, 'wb') as f:
        f.write(attachment)
        f.close()

    return seq, filename
Example 54
def get169Setting(tsn):
    if not tsn:
        return True

    tsnsect = '_tivo_' + tsn
    if config.has_section(tsnsect):
        if config.has_option(tsnsect, 'aspect169'):
            try:
                return config.getboolean(tsnsect, 'aspect169')
            except ValueError:
                pass

    if get169Blacklist(tsn) or get169Letterbox(tsn):
        return False

    return True
Example 55
def _main(config):
    setup_logging(config)

    fp = config.get('ircbot', 'channel_config')
    if fp:
        fp = os.path.expanduser(fp)
        if not os.path.exists(fp):
            raise Exception("Unable to read layout config file at %s" % fp)
    else:
        raise Exception("Channel Config must be specified in config file.")

    try:
        channel_config = ChannelConfig(yaml.load(open(fp)))
    except Exception:
        log = logging.getLogger('gerritbot')
        log.exception("Syntax error in chanel config file")
        raise

    bot = GerritBot(channel_config.channels,
                    config.get('ircbot', 'nick'),
                    config.get('ircbot', 'pass'),
                    config.get('ircbot', 'server'),
                    config.getint('ircbot', 'port'),
                    config.getboolean('ircbot', 'force_ssl'),
                    config.get('ircbot', 'server_password'))
    if config.has_option('ircbot', 'use_mqtt'):
        use_mqtt = config.getboolean('ircbot', 'use_mqtt')
    else:
        use_mqtt = False

    if use_mqtt:
        g = GerritMQTT(bot,
                       channel_config,
                       config.get('mqtt', 'host'),
                       config.get('mqtt', 'base_topic'),
                       config.getint('mqtt', 'port'),
                       config.getboolean('mqtt', 'websocket'))
    else:
        g = Gerrit(bot,
                   channel_config,
                   config.get('gerrit', 'host'),
                   config.get('gerrit', 'user'),
                   config.getint('gerrit', 'port'),
                   config.get('gerrit', 'key'))
    g.start()
    bot.start()
Example 56
    def read_settings(self):
        ''' Reads the settings from the consul_io.ini file (or consul.ini for backwards compatibility)'''
        config = configparser.SafeConfigParser()
        if os.path.isfile(os.path.dirname(os.path.realpath(__file__)) + '/consul_io.ini'):
            config.read(os.path.dirname(os.path.realpath(__file__)) + '/consul_io.ini')
        else:
            config.read(os.path.dirname(os.path.realpath(__file__)) + '/consul.ini')

        config_options = ['host', 'token', 'datacenter', 'servers_suffix',
                          'tags', 'kv_metadata', 'kv_groups', 'availability',
                          'unavailable_suffix', 'available_suffix', 'url',
                          'domain', 'suffixes', 'bulk_load']
        for option in config_options:
            value = None
            if config.has_option('consul', option):
                value = config.get('consul', option).lower()
            setattr(self, option, value)
Example 57
def main():
    if len(sys.argv) != 2:
        print("Usage: %s CONFIGFILE" % sys.argv[0])
        sys.exit(1)

    config = ConfigParser.ConfigParser()
    config.read(sys.argv[1])

    pid_path = ""
    if config.has_option('ircbot', 'pid'):
        pid_path = config.get('ircbot', 'pid')
    else:
        pid_path = "/var/run/gerritbot/gerritbot.pid"

    pid = pid_file_module.TimeoutPIDLockFile(pid_path, 10)
    with daemon.DaemonContext(pidfile=pid):
        _main(config)
Example 58
def getConfiguration(cfgfile):
    # required configuration options
    # Section: {'option': 'default value'}
    logger = logging.getLogger(__name__)
    logger.debug('getting configuration from file: {}'.format(cfgfile))
    cfgpath = os.path.dirname(cfgfile)
    config_required = {
        'Main': {
            'credentials': os.path.join(cfgpath, 'credentials/'),
        },
    }

    config = configuration.get_config(cfgfile)

    update_config = False

    logger.debug('checking sections')
    for section, values in list(config_required.items()):
        if not config.has_section(section):
            logger.warning('section: {} not found in {}'.format(
                section, cfgfile))
            logger.debug('adding section {}'.format(section))
            config.add_section(section)
            update_config = True
        for option, value in list(values.items()):
            if not config.has_option(section, option):
                logger.warning('option: {} not found in {}'.format(
                    option, cfgfile))
                logger.debug('adding option {}: {}'.format(option, value))

                config.set(section, option, value)
                update_config = True

    # for section, options in config_required.items():

    if update_config:
        try:
            logger.debug('updating configuration file at: {}'.format(cfgfile))
            configuration.create_config(cfgfile, config)
        except Exception as e:
            logger.error(e)

    return (config)
Example 59
def determineGear(ex):
    '''
    determines gear based on the type of workout and if workout is a Run it looks at the category to determine shoes.
    Returns the determined Gear
    '''
    gear = ''
    try:
        if ex.type.lower() == 'running':
            if config.has_option(
                    'gear', 'shoe_' + ex.category.replace(' ', '_').lower()):
                gear = config['gear']['shoe_' +
                                      ex.category.replace(' ', '_').lower()]
            else:
                gear = config['gear']['shoe_default']
        else:
            gear = config['gear']['default_' + ex.type]
    except:
        gear = ''

    return gear
Example 60
    def _controllers(self, only_ip=False):
        from configuration import config

        controllers = []
        if config.has_option('General', 'ip_control_dns_name'):
            control_domain = config.get('General', 'ip_control_dns_name')
            self_ip = netaddr.IPNetwork(self.bind_ip)
            for answer in dns.resolver.query(control_domain, 'A'):
                ip = netaddr.IPNetwork(answer.to_text())
                if ip == self_ip:
                    # Ignore itself
                    continue
                ip = ip.ip
                if not only_ip:
                    try:
                        controllers.append(
                            jsonrpclib.Server("http://{}:{}/".format(
                                ip, self.bind_port)))
                    except:
                        logging.warning("Could not connect to controller %s.",
                                        ip)
                else:
                    controllers.append(ip)
        return controllers