Example 1
def getShares(tsn=''):
    shares = [(section, Bdict(config.items(section)))
              for section in config.sections()
              if not (section.startswith(('_tivo_', 'logger_', 'handler_',
                                          'formatter_')) or section in
                      ('Server', 'loggers', 'handlers', 'formatters'))]

    tsnsect = '_tivo_' + tsn
    if config.has_section(tsnsect) and config.has_option(tsnsect, 'shares'):
        # clean up leading and trailing spaces & make sure ref is valid
        tsnshares = []
        for x in config.get(tsnsect, 'shares').split(','):
            y = x.strip()
            if config.has_section(y):
                tsnshares.append((y, Bdict(config.items(y))))
        shares = tsnshares

    shares.sort()

    if get_server('nosettings', 'false').lower() in ['false', 'no', 'off']:
        shares.append(('Settings', {'type': 'settings'}))
    if get_server('tivo_mak') and get_server('togo_path'):
        shares.append(('ToGo', {'type': 'togo'}))

    return shares
Example 2
def getShares(tsn=''):
    shares = [(section, Bdict(config.items(section)))
              for section in config.sections()
              if not (section.startswith(('_tivo_', 'logger_', 'handler_',
                                          'formatter_'))
                      or section in ('Server', 'loggers', 'handlers',
                                     'formatters')
              )
    ]

    tsnsect = '_tivo_' + tsn
    if config.has_section(tsnsect) and config.has_option(tsnsect, 'shares'):
        # clean up leading and trailing spaces & make sure ref is valid
        tsnshares = []
        for x in config.get(tsnsect, 'shares').split(','):
            y = x.strip()
            if config.has_section(y):
                tsnshares.append((y, Bdict(config.items(y))))
        shares = tsnshares

    shares.sort()

    if get_server('nosettings', 'false').lower() in ['false', 'no', 'off']:
        shares.append(('Settings', {'type': 'settings'}))
    if get_server('tivo_mak') and get_server('togo_path'):    
        shares.append(('ToGo', {'type': 'togo'}))

    return shares
Example 3
def getShares(tsn=''):
    shares = [(section, Bdict(config.items(section)))
              for section in config.sections()
              if not (section.startswith(special_section_prefixes)
                      or section in special_section_names)
             ]

    tsnsect = '_tivo_' + tsn
    if config.has_section(tsnsect) and config.has_option(tsnsect, 'shares'):
        # clean up leading and trailing spaces & make sure ref is valid
        tsnshares = []
        for x in config.get(tsnsect, 'shares').split(','):
            y = x.strip()
            if config.has_section(y):
                tsnshares.append((y, Bdict(config.items(y))))
        shares = tsnshares

    shares.sort()

    if not config.getboolean('Server', 'nosettings', fallback=False):
        shares.append(('Settings', {'type': 'settings'}))
    if get_server('tivo_mak') and get_togo('path'):
        shares.append(('ToGo', {'type': 'togo'}))

    return shares
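Example 3 above factors the exclusion rules into module-level constants instead of repeating the literal tuples. Those constants are not shown in the snippet; judging from the inline tuples in the other variants, they are presumably defined roughly as follows (an inference, not code taken from that project):

# Presumed definitions, inferred from the inline tuples in Examples 1, 2 and 4.
special_section_prefixes = ('_tivo_', 'logger_', 'handler_', 'formatter_')
special_section_names = ('Server', 'loggers', 'handlers', 'formatters')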
Example 4
def getShares(tsn=''):
    shares = [(section, Bdict(config.items(section)))
              for section in config.sections()
              if not (section.startswith(('_tivo_', 'logger_', 'handler_',
                                          'formatter_'))
                      or section in ('Server', 'loggers', 'handlers',
                                     'formatters')
              )
    ]

    tsnsect = '_tivo_' + tsn
    if config.has_section(tsnsect) and config.has_option(tsnsect, 'shares'):
        # clean up leading and trailing spaces & make sure ref is valid
        tsnshares = []
        for x in config.get(tsnsect, 'shares').split(','):
            y = x.strip()
            if config.has_section(y):
                tsnshares.append((y, Bdict(config.items(y))))
        shares = tsnshares

    shares.sort()

    if get_server('nosettings', 'false').lower() in ['false', 'no', 'off']:
        shares.append(('Settings', {'type': 'settings'}))
    if get_server('tivo_mak') and get_server('togo_path'):    
        shares.append(('ToGo', {'type': 'togo'}))

    if sys.platform == 'win32':
        shares.append(('VRD', {'type': 'vrd'}))

    if getattr(sys, 'frozen', False):
        shares.append(('Desktop', {'type': 'desktop', 'path': os.path.join(sys._MEIPASS, 'plugins', 'desktop', 'content')}))

    return shares
Example 5
    def register_schedule(self):
        channel = config.get('irc', 'channel')

        for schedule in config.getlist('irc', 'schedules'):
            sect = ':'.join(('schedule', schedule))

            # do not evaluate isenabled() here.
            # if it does, the disabled action will never be scheduled.
            if not config.has_section(sect):
                logging.error('[schedule] [%s] no such schedule', sect)
                continue

            if not config.has_option(sect, 'action'):
                logging.error('[schedule] [%s] no action specified', sect)
                continue

            action = ':'.join(('action', config.get(sect, 'action')))
            if not config.has_section(action):
                logging.error('[schedule] [%s] invalid action specified', sect)
                continue

            interval = config.getint(sect, 'interval')
            if interval < 60:
                logging.error('[schedule] [%s] interval too short', sect)
                continue

            self.reactor.register_schedule(interval, self.do_action,
                                           action, self.connection,
                                           None, {'target': channel}, sect)
            logging.info('[schedule] [%s] registered', sect)
Example 6
    def create_models(self, config):
        self.models = collections.OrderedDict()
        self.main_model = None
        for section in config.sections():
            if section.startswith('table:'):
                table_schema = config.items(section)
                table, name = section.split(':', 2)
                filter_section = 'filter:' + name
                filters = []
                if config.has_section(filter_section):
                    filters = config.items(filter_section)
                index_section = 'index:' + name
                index_schema = []
                if config.has_section(index_section):
                    index_schema = config.items(index_section)
                self.init_model(name, table_schema, index_schema, filters)

        self.sorted_models = sorted(
            self.models.values(),
            key=lambda m: len(m._meta.table_options.get('__iter__', [])))

        if self.client_config['resume'] and not self.main_model:
            raise ValueError('Main model is required for resume mode')
        if not self.main_model:
            logger.warning('Main model is not defined in config')
Example 7
def getShares(tsn=""):
    shares = [
        (section, dict(config.items(section)))
        for section in config.sections()
        if not (
            section.startswith("_tivo_")
            or section.startswith("logger_")
            or section.startswith("handler_")
            or section.startswith("formatter_")
            or section in ("Server", "loggers", "handlers", "formatters")
        )
    ]

    tsnsect = "_tivo_" + tsn
    if config.has_section(tsnsect) and config.has_option(tsnsect, "shares"):
        # clean up leading and trailing spaces & make sure ref is valid
        tsnshares = []
        for x in config.get(tsnsect, "shares").split(","):
            y = x.strip()
            if config.has_section(y):
                tsnshares.append((y, dict(config.items(y))))
        shares = tsnshares

    shares.sort()

    if get_server("nosettings", "false").lower() in ["false", "no", "off"]:
        shares.append(("Settings", {"type": "settings"}))
    if get_server("tivo_mak") and get_server("togo_path"):
        shares.append(("ToGo", {"type": "togo"}))

    return shares
Example 8
def get_config(service_conf, section=''):
    config = ConfigParser.ConfigParser()
    config.read(service_conf)

    conf_items = dict(config.items('common')) if config.has_section('common') else {}
    if section and config.has_section(section):
       conf_items.update(config.items(section))
    return conf_items
Example 9
def get_config(service_conf, section=''):
    config = ConfigParser.ConfigParser()
    config.read(service_conf)

    conf_items = dict(config.items('common')) if config.has_section('common') else {}
    if section and config.has_section(section):
       conf_items.update(config.items(section))
    return conf_items
Example 10
def get_config(section=''):
    config = ConfigParser.ConfigParser()
    service_conf = os.path.join(work_dir,'conf/service.conf')
    config.read(service_conf)
    config_items = dict(config.items('common')) if config.has_section('common') else {}
    if section and config.has_section(section):
        config_items.update(config.items(section))
    return config_items    
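Examples 8-10 share one pattern: read the whole [common] section as a base dict, then let a service-specific section override it. A minimal, self-contained sketch of that behaviour (Python 3 configparser, made-up file contents and section names) looks like this:

import configparser

# Hypothetical contents of service.conf; the [web] section overrides [common].
SAMPLE = """
[common]
log_level = info
port = 8080

[web]
port = 9090
"""

config = configparser.ConfigParser()
config.read_string(SAMPLE)

conf_items = dict(config.items('common')) if config.has_section('common') else {}
if config.has_section('web'):
    conf_items.update(config.items('web'))

print(conf_items)  # {'log_level': 'info', 'port': '9090'}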
Example 11
def init_logging():
    if config.has_section("loggers") and config.has_section("handlers") and config.has_section("formatters"):

        logging.config.fileConfig(config_files)

    elif getDebug():
        logging.basicConfig(level=logging.DEBUG)
    else:
        logging.basicConfig(level=logging.INFO)
Example 12
def init_logging():
    if (config.has_section('loggers') and config.has_section('handlers')
            and config.has_section('formatters')):

        logging.config.fileConfig(config_files)

    elif getDebug():
        logging.basicConfig(level=logging.DEBUG)
    else:
        logging.basicConfig(level=logging.INFO)
Example 13
def get_logging_config(cfg_path=None):
    logging_config = {
        'log_level': None,
        'collector_log_file': '/var/log/datadog/collector.log',
        'forwarder_log_file': '/var/log/datadog/forwarder.log',
        'dogstatsd_log_file': '/var/log/datadog/dogstatsd.log',
        'pup_log_file': '/var/log/datadog/pup.log',
        'log_to_syslog': True,
        'syslog_host': None,
        'syslog_port': None,
    }

    config_path = get_config_path(cfg_path, os_name=getOS())
    config = ConfigParser.ConfigParser()
    config.readfp(skip_leading_wsp(open(config_path)))

    if config.has_section('handlers') or config.has_section('loggers') or config.has_section('formatters'):
        sys.stderr.write("Python logging config is no longer supported and will be ignored.\nTo configure logging, update the logging portion of 'datadog.conf' to match:\n  'https://github.com/DataDog/dd-agent/blob/master/datadog.conf.example'.\n")

    for option in logging_config:
        if config.has_option('Main', option):
            logging_config[option] = config.get('Main', option)

    levels = {
        'CRITICAL': logging.CRITICAL,
        'DEBUG': logging.DEBUG,
        'ERROR': logging.ERROR,
        'FATAL': logging.FATAL,
        'INFO': logging.INFO,
        'WARN': logging.WARN,
        'WARNING': logging.WARNING,
    }
    if config.has_option('Main', 'log_level'):
        logging_config['log_level'] = levels.get(config.get('Main', 'log_level'))

    if config.has_option('Main', 'log_to_syslog'):
        logging_config['log_to_syslog'] = config.get('Main', 'log_to_syslog').strip().lower() in ['yes', 'true', 1]

    if config.has_option('Main', 'syslog_host'):
        host = config.get('Main', 'syslog_host').strip()
        if host:
            logging_config['syslog_host'] = host
        else:
            logging_config['syslog_host'] = None

    if config.has_option('Main', 'syslog_port'):
        port = config.get('Main', 'syslog_port').strip()
        try:
            logging_config['syslog_port'] = int(port)
        except:
            logging_config['syslog_port'] = None

    return logging_config
Example 14
def dict2config(jdict, section):
    config = ConfigParser()
    if not config.has_section(section):
        config.add_section(section)
    for (k, v) in viewitems(jdict):
        config.set(section, k, str(v))
    return config
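dict2config goes the other way: it builds an in-memory ConfigParser from a plain dict. A short usage sketch (Python 3 configparser, plain dict.items() in place of future's viewitems) might look like this:

import io
from configparser import ConfigParser

def dict2config(jdict, section):
    # Same logic as Example 14, with items() instead of viewitems().
    config = ConfigParser()
    if not config.has_section(section):
        config.add_section(section)
    for k, v in jdict.items():
        config.set(section, k, str(v))
    return config

cfg = dict2config({'host': 'localhost', 'port': 5432}, 'db')
buf = io.StringIO()
cfg.write(buf)
print(buf.getvalue())
# [db]
# host = localhost
# port = 5432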
Example 15
def reset():
    global bin_paths
    global config
    global configs_found

    bin_paths = {}

    config = ConfigParser.ConfigParser()
    configs_found = config.read(config_files)
    if not configs_found:
        print ('WARNING: pyTivo.conf does not exist.\n' +
               'Assuming default values.')
        configs_found = config_files[-1:]

    for section in config.sections():
        if section.startswith('_tivo_'):
            tsn = section[6:]
            if tsn.upper() not in ['SD', 'HD']:
                if config.has_option(section, 'name'):
                    tivo_names[tsn] = config.get(section, 'name')
                else:
                    tivo_names[tsn] = tsn
                if config.has_option(section, 'address'):
                    tivos[tsn] = config.get(section, 'address')

    for section in ['Server', '_tivo_SD', '_tivo_HD']:
        if not config.has_section(section):
            config.add_section(section)
Example 16
def get_config(config_file=None, server=None):
    logger.info("Getting config from config file %s" % config_file)
    if config_file is None:
        config_file = '/home/minecraft/minecraft/pyredstone.cfg'
    if not os.path.exists(config_file):
        raise IOError("Could not open config file")
    config = ConfigParser.ConfigParser()
    config.read(config_file)

    if server is None:
        try:
            sections = config.sections()
            logger.debug(sections)
            if len(sections) < 1:
                raise SyntaxError("No sections found in config file")
            elif len(sections) > 1:
                logger.warning("More than one server found, no server specified. Using first server.")
            server = sections[0]
        except ConfigParser.Error as e:
            logger.exception("Could not get sections")
    if not config.has_section(server):
        raise SyntaxError("Server section '%s' of config file does not exist. Cannot continue." % (server, ))

    # Now we have a config file and a section.
    data = {}
    try:
        # Take each item in the config file section and dump into a dict
        for item in config.items(server):
            data[item[0]] = item[1]
        logger.info("Config data: %s" % str(data))
    except ConfigParser.Error as e:
        raise SyntaxError("Config file is improperly formated")
    return data
Example 17
def reset():
    global bin_paths
    global config
    global configs_found
    global tivos_found

    bin_paths = {}

    config = ConfigParser.ConfigParser()
    configs_found = config.read(config_files)
    if not configs_found:
        print ('WARNING: pyTivo.conf does not exist.\n' +
               'Assuming default values.')
        configs_found = config_files[-1:]

    for section in config.sections():
        if section.startswith('_tivo_'):
            tsn = section[6:]
            if tsn.upper() not in ['SD', 'HD', '4K']:
                tivos_found = True
                tivos[tsn] = Bdict(config.items(section))

    for section in ['Server', '_tivo_SD', '_tivo_HD', '_tivo_4K']:
        if not config.has_section(section):
            config.add_section(section)
Example 18
def dict2config(jdict, section):
    config = ConfigParser()
    if not config.has_section(section):
        config.add_section(section)
    for (k, v) in viewitems(jdict):
        config.set(section, k, str(v))
    return config
Example 19
    def isenabled(sect, data={}):
        if not config.has_section(sect):
            return False

        if not config.getboolean(sect, 'enable'):
            return False

        try:
            per = config.get(sect, 'percentage')             # allow '0'
            if per and int(per) < random.randint(1, 100):
                return False

            time_ = config.get(sect, 'time')
            if time_ and not util.time_in(time_):
                return False

            if 'source' in data:
                pattern = config.get(sect, 'source_pattern')
                data['source_match'] = re.search(pattern, data['source'])
                if not data['source_match']:
                    return False

            if 'message' in data:
                pattern = config.get(sect, 'pattern')
                data['match'] = re.search(pattern, data['message'])
                if not data['match']:
                    return False
        except:
            logging.exception('[%s] %s', sect, data)
            return False

        return True
Example 20
def get_config():
    """Read the configuration file.

    Returns the parameters in dictionary format"""

    section = "hbcal"
    parameters = {
        'input calendar': SingleConfigurationParameter(CALENDAR_TYPES,
                                                       'civil'),
        'dafbind': SingleConfigurationParameter(DAFBIND_TYPES, 'civil'),
        'format': SingleConfigurationParameter(FORMAT_TYPES, 'normal'),
        'output calendar':
            MultiConfigurationParameter(CALENDAR_TYPES,
                                        ['civil', 'hebrew'],
                                        (("julian", "gregorian", "civil"),)),
        'sedrah': BinaryConfigurationParameter(),
        'omer': BinaryConfigurationParameter(),
        'molad': BinaryConfigurationParameter(),
        'israel': BinaryConfigurationParameter(),
    }

    config = RawConfigParser()
    home = path.expanduser("~")
    filename = path.join(home, '.hbcal.config')
    if config.read(filename):
        for key, value in parameters.viewitems():
            value.parameter_found(key, section, config)
    if config.has_section('loggers'):
        logging.config.fileConfig(filename, disable_existing_loggers=True)
    return parameters
Example 21
def set_up_logger_syslog_handler(config):
    handlers = {}
    formatters = {}
    if config.has_section('Logging'):
        if config.has_option('Logging', 'loghost') and \
            config.has_option('Logging', 'logport'):
            log_host = config.get('Logging', 'loghost')
            log_port = int(config.get('Logging', 'logport'))
            log_address = (log_host, log_port)
            formatters['syslog_formatter'] = {
                'format': '%(asctime)s %(name)s: %(levelname)s %(message)s',
                'datefmt': '%b %e %H:%M:%S',
            }
            socktype = socket.SOCK_DGRAM
            if config.has_option('Logging', 'logtcp'):
                if config.getboolean('Logging', 'logtcp'):
                    socktype = socket.SOCK_STREAM
                else:
                    socktype = socket.SOCK_DGRAM
            facility = logging.handlers.SysLogHandler.LOG_USER
            if config.has_option('Logging', 'logfacility'):
                try:
                    facility = logging.handlers.SysLogHandler.facility_names[
                        config.get('Logging', 'logfacility')]
                except KeyError:
                    raise Exception('Invalid "logfacility" value of "%s"' %
                        config.get('Logging', 'logfacility'))
            handlers['syslog'] = {
                'class': 'logging.handlers.SysLogHandler',
                'formatter': 'syslog_formatter',
                'address': log_address,
                'facility': facility,
                'socktype': socktype,
            }
    return handlers, formatters
Example 22
def set_up_logger_file_handler(config):
    handlers = {}
    formatters = {}
    if config.has_section('Logging'):
        # create handler
        # add to logger
        # create formatter
        if config.has_option('Logging', 'logfile'):
            logfile = config.get('Logging', 'logfile')
            if config.has_option('Logging', 'loghistory'):
                loghistory = int(config.get('Logging', 'loghistory'))
            else:
                loghistory = 7
            formatters['file_formatter'] = {
                'format': "%(asctime)s - %(levelname)s - %(message)s",
            }
            handlers['file'] = {
                'class': 'logging.handlers.TimedRotatingFileHandler',
                'formatter': 'file_formatter',
                'filename': logfile,
                'when': 'd',
                'interval': 1,
                'backupCount': loghistory,
            }
    return handlers, formatters
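Examples 21 and 22 both return handler and formatter fragments shaped for logging.config.dictConfig; the extra keys ('address', 'filename', 'when', and so on) are passed through to the handler constructors by dictConfig. Presumably the caller merges these fragments into one dictConfig call; a hedged sketch of that wiring, reusing set_up_logger_file_handler from Example 22 with an assumed [Logging] section, could be:

import configparser
import logging.config

# Assumed configuration; option names follow the helpers above.
config = configparser.ConfigParser()
config.read_string("""
[Logging]
logfile = /tmp/app.log
loghistory = 14
""")

handlers, formatters = set_up_logger_file_handler(config)

logging.config.dictConfig({
    'version': 1,
    'formatters': formatters,
    'handlers': handlers,
    'root': {'handlers': list(handlers), 'level': 'INFO'},
})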
Example 23
def reset():
    global bin_paths
    global config
    global configs_found

    bin_paths = {}

    config = ConfigParser.ConfigParser()
    configs_found = config.read(config_files)
    if not configs_found:
        print ("WARNING: pyTivo.conf does not exist.\n" + "Assuming default values.")
        configs_found = config_files[-1:]

    for section in config.sections():
        if section.startswith("_tivo_"):
            tsn = section[6:]
            if tsn.upper() not in ["SD", "HD"]:
                if config.has_option(section, "name"):
                    tivo_names[tsn] = config.get(section, "name")
                else:
                    tivo_names[tsn] = tsn
                if config.has_option(section, "address"):
                    tivos[tsn] = config.get(section, "address")

    for section in ["Server", "_tivo_SD", "_tivo_HD"]:
        if not config.has_section(section):
            config.add_section(section)
Example 24
def get_mon_api_config(config):
    mon_api_config = {'is_enabled': False,
                      'url': '',
                      'project_name': '',
                      'username': '',
                      'password': False,
                      'use_keystone': True,
                      'keystone_url': '',
                      'dimensions': None}

    if config.has_option("Main", "dimensions"):
        # parse comma separated dimensions into a dimension list
        try:
            dim_list = [dim.split(':') for dim in config.get('Main', 'dimensions').split(',')]
            mon_api_config['dimensions'] = {key.strip(): value.strip() for key, value in dim_list}
        except ValueError:
            mon_api_config['dimensions'] = {}

    if config.has_section("Api"):
        options = {"url": config.get,
                   "project_name": config.get,
                   "username": config.get,
                   "password": config.get,
                   "use_keystone": config.getboolean,
                   "keystone_url": config.get}

        for name, func in options.iteritems():
            if config.has_option("Api", name):
                mon_api_config[name] = func("Api", name)

    return mon_api_config
Example 25
def get_config():
    config = configparser.ConfigParser()
    config.read('./conf/diagnose.cnf', encoding='utf-8')
    section_has = config.has_section('default')
    if not section_has:
        sys.exit("Error: The '[default]' not find")
    processing = config.get("default", "processing")
    host = config.get("default", "host")
    user = config.get("default", "user")
    password = config.get("default", "password")
    port = config.get("default", "port")
    database = config.get("default", "database")
    log_level = config.get("default", "log_level")
    type = config.get("default", "type")

    conf_dict = {
        'processing': processing,
        'user': user,
        'host': host,
        'password': password,
        'port': port,
        'database': database,
        'log_level': log_level,
        'type': type
    }
    return conf_dict
Example 26
    def init_cache(self, config):
        self.cache_model = None
        if self.no_cache or self.client_config['resume']:
            return
        if not config.has_section('cache'):
            return
        cache_config = dict(config.items('cache'))
        cache_table = cache_config.get('table')
        if not cache_table:
            return
        logger.info("Init cache table `%s`", cache_table)
        blob_type = cache_config.get('blob_type', 'BLOB')
        max_size = int(cache_config.get('max_size', 65530))
        CacheTendersModel.gzip_data.field_type = blob_type
        self.cache_model = CacheTendersModel
        self.cache_model._meta.database = self.database
        self.cache_model._meta.table_name = cache_table
        self.cache_max_size = max_size
        self.cache_hit_count = 0
        self.cache_miss_count = 0
        try:
            self.cache_model.select().get()
            cache_table_exists = True
        except CacheTendersModel.DoesNotExist:
            cache_table_exists = True
        except peewee.DatabaseError:
            cache_table_exists = False
            self.database.rollback()
        if self.drop_cache:
            logger.warning("Drop cache table `%s`", cache_table)
            self.cache_model.drop_table(safe=True)
            cache_table_exists = False
        if not cache_table_exists:
            logger.info("Create cache table `%s`", cache_table)
            self.cache_model.create_table(safe=True)
Example 27
def reset():
    global bin_paths
    global config
    global configs_found

    bin_paths = {}

    config = ConfigParser.ConfigParser()
    configs_found = config.read(config_files)
    if not configs_found:
        print ('WARNING: pyTivo.conf does not exist.\n' +
               'Assuming default values.')
        configs_found = config_files[-1:]

    for section in config.sections():
        if section.startswith('_tivo_'):
            tsn = section[6:]
            if tsn.upper() not in ['SD', 'HD']:
                if config.has_option(section, 'name'):
                    tivo_names[tsn] = config.get(section, 'name')
                else:
                    tivo_names[tsn] = tsn
                if config.has_option(section, 'address'):
                    tivos[tsn] = config.get(section, 'address')

    for section in ['Server', '_tivo_SD', '_tivo_HD']:
        if not config.has_section(section):
            config.add_section(section)
Example 28
def main():
    home_dir = os.path.expanduser('~')
    program_dir = os.path.abspath(os.path.join(home_dir, '.dbackups'))
    db_config_file = os.path.join(program_dir, 'databases.ini')

    if not os.path.isdir(os.path.join(BASE_DIR, '../logs')):
        os.mkdir(os.path.join(BASE_DIR, '../logs'))

    if not os.path.isfile(db_config_file):
        print('Config File not found. {}'.format(db_config_file))
        sys.exit(1)

    #logging_config = resource_filename(__name__, '../config/logging.ini')
    #logging.config.fileConfig(logging_config)

    logging.basicConfig(level=logging.DEBUG,
                        format='%(asctime)s %(levelname)-6s line %(lineno)-4s %(message)s')

    args = parser.parse_args()
    pprint(args)
    config = ConfigParser.ConfigParser()
    config.read(db_config_file)

    logging.debug(config.sections())

    if not config.has_section(args.database):
        logging.info('DB alias not found in the config file {} -> [{}]'.format(db_config_file, args.database))
        sys.exit(1)
    else:
        logging.info('Found the DB settings in the config file. Continuing.')
        db_type = config.get(args.database, 'db_type')
        db_host = config.get(args.database, 'db_host')
        db_user = config.get(args.database, 'db_user')
        db_pass = config.get(args.database, 'db_pass')
        db_port = config.get(args.database, 'db_port')
        db_name = config.get(args.database, 'db_name')

    db_object = get_database_object(db_type, db_host, db_name, db_user, db_pass, db_port)
    logging.debug('DB object created: {}'.format(db_object))

    if args.command == 'backup':
        logging.info('Chose to backup {}'.format(db_object.db_host))
        logging.info('Dump file: [{}]'.format(db_object.dump_file_name))
        db_object.dump()
        logging.info('Dumping DB finished.')
        if args.upload_url:
            print('Uploading to the desired URL: {}'.format(args.upload_url))
            upload_http_put(db_object.dump_file, args.upload_url)

    if args.command == 'clone':
        logging.info('Going to clone_to from one DB to another.')
        dev_db = get_database_object(db_type, args.dev_host, args.dev_name, args.dev_user, args.dev_pass,
                                     args.dev_port)
        db_object.clone_to(dev_db, args.latest_local)

    if args.command == 'clean':
        logging.info('Cleaning the dumps directory to make room for more dumps.')
        file_list = cleanup_backups.find_files_for_delete(db_object.get_dump_dir(), db_object.db_host,
                                                          db_object.db_name)
        cleanup_backups.delete_file_list(file_list)
Example 29
def dict2config(jdict, section):
    config = ConfigParser.ConfigParser()
    if not config.has_section(section):
        config.add_section(section)
    for k,v in jdict.iteritems():
        config.set(section, k, str(v))
    return config
Example 30
    def enabled(self, sect):
        if not config.has_section(sect):
            return False

        if not config.getboolean(sect, 'enable'):
            return False

        try:
            per = config.get(sect, 'percentage')             # allow '0'
            if per and int(per) < random.randint(1, 100):
                return False

            time_ = config.get(sect, 'time')
            if time_ and not util.time_in(time_):
                return False

            if 'user_name' in self.data:
                pattern = config.get(sect, 'user_pattern')
                self.data['user_match'] = re.search(pattern,
                                                    self.data['user_name'])
                if not self.data['user_match']:
                    return False

            if 'text' in self.data:
                pattern = config.get(sect, 'pattern')
                self.data['match'] = re.search(pattern, self.data['text'])
                if not self.data['match']:
                    return False
        except:
            logging.exception('[%s] %s', sect, self.data)
            return False

        return True
Example 31
    def write_sensitive_properties(self, pg_id, sensitive_file):
        config = configparser.RawConfigParser()
        config.optionxform = str  # Preserve case
        config.read(sensitive_file)  # This file shouldn't be checked in
        processors = self.get_processors_by_pg(pg_id)
        for processor in processors["processors"]:
            self.logger.debug("Processor: {}".format(
                processor["component"]["name"]))
            if config.has_section(processor["component"]["name"]):
                self.logger.debug("Found. Setting properties")
                properties = {}
                for name, value in config.items(
                        processor["component"]["name"]):
                    properties[name] = value
                    self.logger.debug("{} = {}".format(name, value))
                rtn = self.set_processor_properties(processor, properties)
                self.logger.debug(
                    "set_processor_properties returned {}".format(
                        json.dumps(rtn)))
            for key, value in processor['component']['config'][
                    'properties'].items():
                if value is None:
                    continue
                self.logger.debug("Checking properties. {}/{}".format(
                    key, value))
                # If our value is a uuid then it's likely a controller service. Update those properties.
                if re.search(
                        '[a-f0-9]{8}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{12}',
                        value):
                    self.recurse_update_controller(value, config)
Example 32
def dict2config(jdict, section):
    config = ConfigParser.ConfigParser()
    if not config.has_section(section):
        config.add_section(section)
    for k, v in jdict.iteritems():
        config.set(section, k, str(v))
    return config
Example 33
def reset():
    global bin_paths
    global config
    global configs_found
    global tivos_found

    bin_paths = {}

    config = ConfigParser.ConfigParser()
    configs_found = config.read(config_files)
    if not configs_found:
        print('WARNING: pyTivo.conf does not exist.\n' +
              'Assuming default values.')
        configs_found = config_files[-1:]

    for section in config.sections():
        if section.startswith('_tivo_'):
            tsn = section[6:]
            if tsn.upper() not in ['SD', 'HD', '4K']:
                tivos_found = True
                tivos[tsn] = Bdict(config.items(section))

    for section in ['Server', '_tivo_SD', '_tivo_HD', '_tivo_4K']:
        if not config.has_section(section):
            config.add_section(section)
Example 34
    def action(self, now, name):
        sect = ':'.join(('schedule', name))

        self.state.setdefault(name, {
            'name': name,
            'sect': sect,
            'time': now,
        })

        if not config.has_section(sect):
            logging.error('[schedule] [%s] invalid name', name)
            del self.state[name]
            return

        if not config.getboolean(sect, 'enable'):
            del self.state[name]
            return

        interval = config.getint(sect, 'interval')
        if interval < 60:
            logging.error('[schedule] [%s] interval too short', name)
            del self.state[name]
            return

        actsect = ':'.join(('action', config.get(sect, 'action')))
        if not config.has_section(actsect):
            logging.error('[schedule] [%s] invalid action', name)
            del self.state[name]
            return

        time_ = config.get(name, 'time')
        if time_ and not util.time_in(time_):
            return

        if self.state[name]['time'] + interval > now:
            return

        obj = EventHandler({})
        if not obj.enabled(actsect):
            return

        obj.action(actsect)

        if obj.body:
            post(obj.body)

        self.state[name]['time'] = now
Example 35
    def __init__(self, config_file, tvdb):
        config = configparser.ConfigParser()
        config.read(config_file)
        log_file = config.get('main', 'log.config', fallback=None)
        if log_file:
            import yaml
            with open(log_file) as f:
                log_config = yaml.load(f)
                logging.config.dictConfig(log_config)

        template = config.get(
            'directories',
            'out.pattern',
            fallback=
            '${series}/Season ${season}/${series} - S${season|zpad}E${episode|zpad} - ${episode_name}.${ext}'
        )
        self.template = template

        self.wtv_in_dir = config.get('directories', 'tv.in')
        self.tv_pattern = config.get('directories', 'tv.pattern')
        self.com_in_dir = config.get('directories', 'commercial.in')
        self.srt_in_dir = config.get('directories', 'srt.in')
        self.temp_dir = config.get('directories', 'temp.dir')
        self.out_dir = config.get('directories', 'out.dir')
        self.delete_source = config.getboolean('directories',
                                               'delete.source.files',
                                               fallback=True)

        self.convert_config = convert_config_from_config_section(
            config, 'transcode')

        if config.has_section('ffmpeg'):
            logger.error('You are using an outdated configuration')
            raise Exception('You are using an outdated configuration')

        self.debug = config.getboolean('main', 'debug', fallback=False)

        self.ccextractor_exe = config.get('ccextractor',
                                          'executable',
                                          fallback=ccextractor())
        self.ccextractor_run = config.getboolean('ccextractor',
                                                 'run.if.missing',
                                                 fallback=False)

        self.comskip_exe = config.get('comskip', 'executable', fallback=None)
        self.comskip_run = config.getboolean('comskip',
                                             'run.if.missing',
                                             fallback=comskip())
        self.comskip_ini = config.get('comskip', 'comskip.ini', fallback=None)

        db_file = config.get('main', 'database.file', fallback='db.sqlite')

        self.wtvdb = WtvDb(db_file)
        self.tvdb = tvdb

        if self.convert_config.include_subtitles:
            logger.warning(
                'Include Subtitles is True. This usually does not work with TV captions.'
            )
Example 36
def get_tsn(name, tsn=None, raw=False):
    if tsn and config.has_section('_tivo_' + tsn):
        try:
            return config.get('_tivo_' + tsn, name, raw)
        except NoOptionError:
            pass
    section_name = get_section(tsn)
    if config.has_section(section_name):
        try:
            return config.get(section_name, name, raw)
        except NoOptionError:
            pass
    try:
        return config.get('Server', name, raw)
    except NoOptionError:
        pass
    return None
Example 37
    def session_login(self, login, password):
        # Initialize the database (kctl).
        db_init()

        # Validate parameters.
        login = validate_login(login)
        password = validate_password(password)

        # Load config
        config = CustomConfigParser()
        config.read(ini_conf_path)

        # Check login/pass pair.
        if not config.has_section(login):
            raise xmlrpclib.Fault(201, "Invalid login or password.")
        goodpass = config.get_default(login, "password", "").strip(" ")
        if goodpass == "":
            log.error("Invalid configuration 'password' for login '%s'." % ( login ) )
            raise xmlrpclib.Fault(201, "Invalid login or password.")                
        if goodpass != password: 
            raise xmlrpclib.Fault(201, "Invalid login or password.")

        # Create session.
        session = kweb_session.ksession_get_session(database=sess_database)
        session.data["kpsapi"] = 1
        session.data["start_stamp"] = int(time.time())

        # Load security context.
        security_ctx = config.get_default(login, "security_ctx", "").strip(" ")
        if security_ctx not in valid_security_contexts:
            log.error("Invalid configuration 'security_ctx' for login '%s'." % ( login ) )
            raise xmlrpclib.Fault(106, "Invalid KPS API configuration.")
        session.data["security_ctx"] = security_ctx
        if session.data["security_ctx"] == KAPI_SECURITY_CTX_ORG:
            str_org_id = config.get_default(login, "org_id", "").strip(" ")
            if not str_org_id.isdigit():
                log.error("Invalid configuration 'org_id' for login '%s'." % ( login ) )
                raise xmlrpclib.Fault(106, "Invalid KPS API configuration.")
            org_id = int(str_org_id)
            if org_id == 0:
                org_id = sdb_get_main_org_id()
                if not org_id:
                     raise xmlrpclib.Fault(241, "No main organization yet.")
            session.data["org_id"] = org_id

        # Load keys.
        session.data["main_key_id"] = main_key_id()
        try:
            str_key_id = config.get(login, "key_id").strip(" ")
            if str_key_id == "":
                session.data["key_id"] = session.data["main_key_id"]
            elif str_key_id.isdigit():
                session.data["key_id"] = int(str_key_id)
            else:
                log.error("Invalid configuration 'key_id' for login '%s'." % ( login ) )
                raise xmlrpclib.Fault(106, "Invalid KPS API configuration.")
        except ConfigParser.NoOptionError, e:
            session.data["key_id"] = session.data["main_key_id"]
Example 38
def get_mon_api_config(config):
    mon_api_config = {
        'is_enabled': False,
        'url': '',
        'project_name': '',
        'project_id': '',
        'project_domain_name': '',
        'project_domain_id': '',
        'ca_file': '',
        'insecure': '',
        'username': '',
        'password': '',
        'use_keystone': True,
        'keystone_url': '',
        'dimensions': None,
        'max_buffer_size': 1000,
        'backlog_send_rate': 5
    }

    if config.has_option("Main", "dimensions"):
        # parse comma separated dimensions into a dimension list
        try:
            dim_list = [
                dim.split(':')
                for dim in config.get('Main', 'dimensions').split(',')
            ]
            mon_api_config['dimensions'] = {
                key.strip(): value.strip()
                for key, value in dim_list
            }
        except ValueError:
            mon_api_config['dimensions'] = {}

    if config.has_section("Api"):
        options = {
            "url": config.get,
            "project_name": config.get,
            "project_id": config.get,
            "project_domain_name": config.get,
            "project_domain_id": config.get,
            "ca_file": config.get,
            "insecure": config.get,
            "username": config.get,
            "password": config.get,
            "use_keystone": config.getboolean,
            "keystone_url": config.get,
            "max_buffer_size": config.getint,
            "backlog_send_rate": config.getint,
            "amplifier": config.getint
        }

        for name, func in options.iteritems():
            if config.has_option("Api", name):
                mon_api_config[name] = func("Api", name)

    return mon_api_config
Example 39
def getConfiguration(cfgfile=None, config_required={'Main': {'key1': 'value1', 'key2': 'value2'}}):
    '''
    read an ini configuration file and return a dictionary of key/value pairs
    update configuration file if missing any sections
    accepts: 
        cfgfile - path to configuration file
        config_required - nested dictionary in the following format:
        {'Section1':
            {'key1': 'value1', 'key2': 'value2'},
            
         'Section 2':
            {'key1': 'value1'}
        }
    '''
    if not cfgfile:
        raise ValueError('no configuration file specified')
    # required configuration options
    # Section: {'option': 'default value'}
    logger = logging.getLogger(__name__)
    logger.debug('getting configuration from file: {}'.format(cfgfile))
    cfgpath = os.path.dirname(cfgfile)
#     config_required = {
#         'Main': {'credentials': os.path.join(cfgpath, 'credentials/'), 
#                  },
#         }

    config = configuration.get_config(cfgfile)

    update_config = False

    logger.debug('checking sections')
    for section, values in list(config_required.items()):
        if not config.has_section(section):
            logger.warning('section: {} not found in {}'.format(section, cfgfile))
            logger.debug('adding section {}'.format(section))
            config.add_section(section)
            update_config = True
        for option, value in list(values.items()):
            if not config.has_option(section, option):
                logger.warning('option: {} not found in {}'.format(option, cfgfile))
                logger.debug('adding option {}: {}'.format(option, value))

                config.set(section, option, value)
                update_config = True


    # for section, options in config_required.items():

    if update_config:
        try:
            logger.debug('updating configuration file at: {}'.format(cfgfile))
            configuration.create_config(cfgfile, config)
        except Exception as e:
            logger.error(e)
            
    return(config)
Example 40
    def from_config(self, config):
        if not config.has_section(self.name):
            return {}
        res = {}
        for opt in config.options(self.name):
            value = config.get(self.name, opt)
            if not value and opt in self.required:
                continue
            res[opt] = value
        return res
Example 41
    def session(self):
        """ 
        Session property used for sqlalchemy
        """
        if not config.has_section("Database"):
            return None

        if not hasattr(self, "_session") or self._session == None:
            from models.model import engine
            from sqlalchemy.orm import sessionmaker
            self._session = sessionmaker(bind=engine)()
        return self._session
Example 42
def load_config(defaults, files):
    config = ConfigParser.ConfigParser()
    with open(defaults, 'r') as f:
        config.readfp(f)
    config.read(files)

    if 'ACCOUNTING_CLIENT_DIR' in os.environ:
        # hack to make it possible to find client dir in buildbot
        if config.has_section('accounting'):
            config.set('accounting', 'client_dir',
                       os.environ['ACCOUNTING_CLIENT_DIR'])

    return config
Example 43
def get_config_value(section, key, default=None):
    config = get_config_parser()
    if not config.has_section(section):
        if default:
            return default
    if default is None:
        return config.get(section=section, option=key)
    try:
        return config.get(section=section, option=key)
    except configparser.NoOptionError:
        return default
    except configparser.NoSectionError:
        return default
Example 44
    def recurse_update_controller(self, controller_id, config):
        self.logger.debug("Updating controller id {}".format(controller_id))
        controller_service = self.get_controller_service(controller_id)
        if controller_service is None:
            self.logger.warning(
                "Could not find controller service with id: {}".format(
                    controller_id))
            return

        self.logger.debug("Updating {}".format(
            json.dumps(controller_service['component']['name'])))
        state = controller_service['component']['state']
        if state == 'ENABLED':
            self.logger.debug("Disabling controller")
            controller_service = self.update_controller_status(
                controller_service, self.CONTROLLER_DISABLED)
            self.logger.debug("{}".format(json.dumps(controller_service)))

        controller_name = controller_service['component']['name']
        config_section = controller_name
        if 'config_section' in controller_service['component']['properties']:
            config_section = controller_service['component']['properties'][
                'config_section']
        self.logger.info("using section: {}".format(config_section))
        if config.has_section(config_section):
            controller_obj = {
                "component": {
                    "id": controller_service["component"]["id"],
                    "properties": {}
                },
                "revision": {
                    "version": controller_service["revision"]["version"]
                }
            }
            for name, value in config.items(config_section):
                if not name.startswith("_"):
                    self.logger.debug("Controller Setting {}={}".format(
                        name, value))
                    controller_obj["component"]["properties"][name] = value
            self.logger.debug("controller properties {}".format(
                json.dumps(controller_obj)))
            controller_service = self.update_controller_service(controller_obj)
            if controller_service is not None:
                self.logger.debug("update controller returned: {}".format(
                    json.dumps(controller_service)))
            else:
                self.logger.warning(
                    "update controller returned None. Did it fail?")
        rtn = self.update_controller_status(controller_service,
                                            self.CONTROLLER_ENABLED)
Example 45
def user_update(username):
    if not config: raise ValueError('Configuration not loaded')
    if not config.has_section('users'):
        config.add_section('users')
    if config.has_option('users', username):
        print 'Changing password for %s' % username
    else:
        print 'Adding new user %s' % username
    password = getpass.getpass('Password: '******' * Updating OFTG-Ninja config ...'
        return True
    else:
        print ' * Failed to update user'
        return False
Example 46
def get169Setting(tsn):
    if not tsn:
        return True

    tsnsect = "_tivo_" + tsn
    if config.has_section(tsnsect):
        if config.has_option(tsnsect, "aspect169"):
            try:
                return config.getboolean(tsnsect, "aspect169")
            except ValueError:
                pass

    if get169Blacklist(tsn) or get169Letterbox(tsn):
        return False

    return True
Example 47
    def __init__(self, libjars_in_hdfs=None, input_format=None):
        libjars_in_hdfs = libjars_in_hdfs or []
        config = configuration.get_config()
        streaming_jar = config.get('hadoop', 'streaming-jar', '/tmp/hadoop-streaming.jar')

        if config.has_section('job-conf'):
            job_confs = dict(config.items('job-conf'))
        else:
            job_confs = {}

        super(MapReduceJobRunner, self).__init__(
            streaming_jar,
            input_format=input_format,
            libjars_in_hdfs=libjars_in_hdfs,
            jobconfs=job_confs,
        )
Example 48
def get169Setting(tsn):
    if not tsn:
        return True

    tsnsect = '_tivo_' + tsn
    if config.has_section(tsnsect):
        if config.has_option(tsnsect, 'aspect169'):
            try:
                return config.getboolean(tsnsect, 'aspect169')
            except ValueError:
                pass

    if get169Blacklist(tsn) or get169Letterbox(tsn):
        return False

    return True
Example 49
    def __init__(self, libjars_in_hdfs=None, input_format=None):
        libjars_in_hdfs = libjars_in_hdfs or []
        config = configuration.get_config()
        streaming_jar = config.get('hadoop', 'streaming-jar', '/tmp/hadoop-streaming.jar')

        if config.has_section('job-conf'):
            job_confs = dict(config.items('job-conf'))
        else:
            job_confs = {}

        super(MapReduceJobRunner, self).__init__(
            streaming_jar,
            input_format=input_format,
            libjars_in_hdfs=libjars_in_hdfs,
            jobconfs=job_confs,
        )
Example 50
def get_mon_api_config(config):
    mon_api_config = {'is_enabled': False,
                      'url': '',
                      'project_name': '',
                      'project_id': '',
                      'project_domain_name': '',
                      'project_domain_id': '',
                      'ca_file': '',
                      'insecure': '',
                      'username': '',
                      'password': '',
                      'use_keystone': True,
                      'keystone_url': '',
                      'dimensions': None,
                      'max_buffer_size': 1000,
                      'backlog_send_rate': 5}

    if config.has_option("Main", "dimensions"):
        # parse comma separated dimensions into a dimension list
        try:
            dim_list = [dim.split(':') for dim in config.get('Main', 'dimensions').split(',')]
            mon_api_config['dimensions'] = {key.strip(): value.strip() for key, value in dim_list}
        except ValueError:
            mon_api_config['dimensions'] = {}

    if config.has_section("Api"):
        options = {"url": config.get,
                   "project_name": config.get,
                   "project_id": config.get,
                   "project_domain_name": config.get,
                   "project_domain_id": config.get,
                   "ca_file": config.get,
                   "insecure": config.get,
                   "username": config.get,
                   "password": config.get,
                   "use_keystone": config.getboolean,
                   "keystone_url": config.get,
                   "max_buffer_size": config.getint,
                   "backlog_send_rate": config.getint,
                   "amplifier": config.getint}

        for name, func in options.iteritems():
            if config.has_option("Api", name):
                mon_api_config[name] = func("Api", name)

    return mon_api_config
Example 51
    def __getitem__(self, key):

        config = self._config_parser
        if config and config.has_section('options') and config.has_option('options', key):
            default = self._config_parser.get('options', key)
        else:
            default = self._options.get(key, None)

        # check command line options (1)
        if self._opts.get(key):
            return self._opts[key]

        # check environment (2)
        if self._prefix:
            return os.environ.get(self._prefix + '_' + key.upper(), default)

        return default
Example 52
    def create_models(self, config):
        self.models = dict()
        self.main_model = None
        for section in config.sections():
            if section.startswith('table:'):
                table_schema = config.items(section)
                table, name = section.split(':', 2)
                index_section = 'index:' + name
                index_schema = []
                if config.has_section(index_section):
                    index_schema = config.items(index_section)
                self.init_model(name, table_schema, index_schema)

        self.sorted_models = sorted(self.models.values(),
            key=lambda m: len(m._meta.table_options.get('__iter__', [])))

        if self.client_config['resume'] and not self.main_model:
            raise ValueError('Main model is required for resume mode')
Example 53
def getConfiguration(cfgfile):
    # required configuration options
    # Section: {'option': 'default value'}
    logger = logging.getLogger(__name__)
    logger.debug('getting configuration from file: {}'.format(cfgfile))
    cfgpath = os.path.dirname(cfgfile)
    config_required = {
        'Main': {
            'credentials': os.path.join(cfgpath, 'credentials/'),
        },
    }

    config = configuration.get_config(cfgfile)

    update_config = False

    logger.debug('checking sections')
    for section, values in list(config_required.items()):
        if not config.has_section(section):
            logger.warning('section: {} not found in {}'.format(
                section, cfgfile))
            logger.debug('adding section {}'.format(section))
            config.add_section(section)
            update_config = True
        for option, value in list(values.items()):
            if not config.has_option(section, option):
                logger.warning('option: {} not found in {}'.format(
                    option, cfgfile))
                logger.debug('adding option {}: {}'.format(option, value))

                config.set(section, option, value)
                update_config = True

    # for section, options in config_required.items():

    if update_config:
        try:
            logger.debug('updating configuration file at: {}'.format(cfgfile))
            configuration.create_config(cfgfile, config)
        except Exception as e:
            logger.error(e)

    return (config)
Example 54
    def __init__(self, config_file):
        config = configparser.ConfigParser()
        config.read(config_file)

        # Directories
        self.movie_in_dir = config.get('directories', 'movie.dir.in')
        self.tv_in_dir = config.get('directories', 'tv.dir.in')
        self.working_dir = config.get('directories', 'working.dir')
        self.movie_out_dir = config.get('directories', 'movie.dir.out')
        self.tv_out_dir = config.get('directories', 'tv.dir.out')

        # Backup
        self.backup_enabled = config.getboolean('backup',
                                                'enabled',
                                                fallback=True)
        self.rclone_exe = config.get('backup', 'rclone')
        self.rclone_args = shlex.split(
            config.get('backup', 'rclone.args', fallback=''))
        self.split_exe = config.get('backup', 'split')
        self.backup_path = config.get('backup', 'backup.path')
        self.max_size = int(config.get('backup', 'max.size')) * (1024**3)
        self.split_size = config.get('backup', 'split.size')

        self.movie_convert_config = convert_config_from_config_section(
            config, 'movie.transcode')
        self.tv_convert_config = convert_config_from_config_section(
            config, 'tv.transcode')

        if config.has_section('transcode'):
            raise Exception(
                'Config file is out dated. Please update to use movie.transcode and tv.transcode'
            )

        # Logging
        file = config.get('logging', 'config', fallback=None)
        if file:
            import yaml
            with open(file) as f:
                log_config = yaml.safe_load(f)
                logging.config.dictConfig(log_config)
        db_file = config.get('logging', 'db', fallback='processed.shelve')
        self.db = ProcessedDatabase(db_file)
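
The constructor above reads [directories], [backup], the two *.transcode sections and an optional [logging] section, and refuses a legacy [transcode] section. Below is a sketch of a compatible file written with the standard library; every value is a placeholder, and whether the transcode sections may stay empty depends on convert_config_from_config_section, which is not shown here:

import configparser

cfg = configparser.ConfigParser()
cfg['directories'] = {
    'movie.dir.in': '/media/in/movies',
    'tv.dir.in': '/media/in/tv',
    'working.dir': '/tmp/work',
    'movie.dir.out': '/media/out/movies',
    'tv.dir.out': '/media/out/tv',
}
cfg['backup'] = {
    'rclone': '/usr/bin/rclone',
    'split': '/usr/bin/split',
    'backup.path': 'remote:backups',
    'max.size': '50',        # read as GiB: the constructor multiplies by 1024**3
    'split.size': '2G',
}
cfg['movie.transcode'] = {}
cfg['tv.transcode'] = {}
cfg['logging'] = {'db': 'processed.shelve'}
with open('media.ini', 'w') as fh:       # illustrative file name
    cfg.write(fh)
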
Example no. 55
    def _setup(self):
        """
        Sets up the worker task. Setup logging, Pylons globals and so on
        """

        global log
        config = ConfigParser.ConfigParser()
        config.read(self.inifile)

        if not config.has_section('loggers'):
            # Sorry, the logger has not been set up yet
            print('Config file does not have [loggers] section')
            sys.exit(1)

        logging.config.fileConfig(self.inifile)
        logger_name = 'debexpo.worker'
        log = logging.getLogger(logger_name)

        sys.path.append(os.path.dirname(self.inifile))
        conf = appconfig('config:' + self.inifile)
        pylons.config = load_environment(conf.global_conf, conf.local_conf)
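
_setup() refuses to continue unless the .ini file handed to the worker also carries the standard logging.config.fileConfig sections. A minimal sketch of that logging block, generated and then loaded with the standard library (file name and handler choice are illustrative):

import configparser
import logging.config

cfg = configparser.RawConfigParser()
cfg['loggers'] = {'keys': 'root'}
cfg['handlers'] = {'keys': 'console'}
cfg['formatters'] = {'keys': 'plain'}
cfg['logger_root'] = {'level': 'INFO', 'handlers': 'console'}
cfg['handler_console'] = {'class': 'StreamHandler',
                          'formatter': 'plain',
                          'args': '(sys.stderr,)'}
cfg['formatter_plain'] = {'format': '%(asctime)s %(levelname)s %(message)s'}
with open('worker.ini', 'w') as fh:
    cfg.write(fh)

logging.config.fileConfig('worker.ini')
logging.getLogger('debexpo.worker').info('logging configured')
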
Example no. 56
def get_config(parse_args=True, cfg_path=None, options=None):
    if parse_args:
        options, _ = get_parsed_args()

    # General config
    agentConfig = {
        'check_freq': DEFAULT_CHECK_FREQUENCY,
        'dogstatsd_port': 8125,
        'dogstatsd_target': 'http://localhost:17123',
        'graphite_listen_port': None,
        'hostname': None,
        'listen_port': None,
        'tags': None,
        'use_ec2_instance_id': False,  # DEPRECATED
        'version': get_version(),
        'watchdog': True,
        'additional_checksd': '/etc/sd-agent/checks.d/',
        'bind_host': get_default_bind_host(),
        'statsd_metric_namespace': None,
        'utf8_decoding': False
    }

    if Platform.is_mac():
        agentConfig['additional_checksd'] = '/usr/local/etc/sd-agent/checks.d/'

    # Config handling
    try:
        # Find the right config file
        path = os.path.realpath(__file__)
        path = os.path.dirname(path)

        config_path = get_config_path(cfg_path, os_name=get_os())
        config = ConfigParser.ConfigParser()
        config.readfp(skip_leading_wsp(open(config_path)))

        # bulk import
        for option in config.options('Main'):
            agentConfig[option] = config.get('Main', option)

        # Store developer mode setting in the agentConfig
        if config.has_option('Main', 'developer_mode'):
            agentConfig['developer_mode'] = _is_affirmative(
                config.get('Main', 'developer_mode'))

        # Allow an override with the --profile option
        if options is not None and options.profile:
            agentConfig['developer_mode'] = True

        #
        # Core config
        #

        # FIXME unnecessarily complex
        if config.has_option('Main', 'sd_account'):
            agentConfig['sd_account'] = config.get('Main', 'sd_account')
        agentConfig['use_forwarder'] = False
        if options is not None and options.use_forwarder:
            listen_port = 17124
            if config.has_option('Main', 'listen_port'):
                listen_port = int(config.get('Main', 'listen_port'))
            agentConfig['sd_url'] = "http://" + agentConfig[
                'bind_host'] + ":" + str(listen_port)
            agentConfig['use_forwarder'] = True
        elif options is not None and not options.disable_sd and options.sd_url:
            agentConfig['sd_url'] = options.sd_url
        elif config.has_option('Main', 'sd_url'):
            agentConfig['sd_url'] = config.get('Main', 'sd_url')
        else:
            # Default agent URL
            agentConfig['sd_url'] = "https://" + agentConfig[
                'sd_account'] + ".agent.serverdensity.io"
        if agentConfig['sd_url'].endswith('/'):
            agentConfig['sd_url'] = agentConfig['sd_url'][:-1]

        # Extra checks.d path
        # the linux directory is set by default
        if config.has_option('Main', 'additional_checksd'):
            agentConfig['additional_checksd'] = config.get(
                'Main', 'additional_checksd')
        elif get_os() == 'windows':
            # default windows location
            common_path = _windows_commondata_path()
            agentConfig['additional_checksd'] = os.path.join(
                common_path, 'ServerDensity', 'checks.d')

        if config.has_option('Main', 'use_dogstatsd'):
            agentConfig['use_dogstatsd'] = config.get(
                'Main', 'use_dogstatsd').lower() in ("yes", "true")
        else:
            agentConfig['use_dogstatsd'] = True

        # Service discovery
        if config.has_option('Main', 'service_discovery_backend'):
            try:
                additional_config = extract_agent_config(config)
                agentConfig.update(additional_config)
            except Exception:
                log.error('Failed to load the agent configuration related to '
                          'service discovery. It will not be used.')

        # Concerns only Windows
        if config.has_option('Main', 'use_web_info_page'):
            agentConfig['use_web_info_page'] = config.get(
                'Main', 'use_web_info_page').lower() in ("yes", "true")
        else:
            agentConfig['use_web_info_page'] = True

        # Which agent key to use
        agentConfig['agent_key'] = config.get('Main', 'agent_key')

        # local traffic only? Default to no
        agentConfig['non_local_traffic'] = False
        if config.has_option('Main', 'non_local_traffic'):
            agentConfig['non_local_traffic'] = config.get(
                'Main', 'non_local_traffic').lower() in ("yes", "true")

        # DEPRECATED
        if config.has_option('Main', 'use_ec2_instance_id'):
            use_ec2_instance_id = config.get('Main', 'use_ec2_instance_id')
            # translate yes into True, the rest into False
            agentConfig['use_ec2_instance_id'] = (
                use_ec2_instance_id.lower() == 'yes')

        if config.has_option('Main', 'check_freq'):
            try:
                agentConfig['check_freq'] = int(
                    config.get('Main', 'check_freq'))
            except Exception:
                pass

        # Custom histogram aggregate/percentile metrics
        if config.has_option('Main', 'histogram_aggregates'):
            agentConfig['histogram_aggregates'] = get_histogram_aggregates(
                config.get('Main', 'histogram_aggregates'))

        if config.has_option('Main', 'histogram_percentiles'):
            agentConfig['histogram_percentiles'] = get_histogram_percentiles(
                config.get('Main', 'histogram_percentiles'))

        # Disable Watchdog (optionally)
        if config.has_option('Main', 'watchdog'):
            if config.get('Main', 'watchdog').lower() in ('no', 'false'):
                agentConfig['watchdog'] = False

        # Optional graphite listener
        if config.has_option('Main', 'graphite_listen_port'):
            agentConfig['graphite_listen_port'] = \
                int(config.get('Main', 'graphite_listen_port'))
        else:
            agentConfig['graphite_listen_port'] = None

        # Dogstatsd config
        dogstatsd_defaults = {
            'dogstatsd_port': 8125,
            'dogstatsd_target':
            'http://' + agentConfig['bind_host'] + ':17123',
        }
        for key, value in dogstatsd_defaults.iteritems():
            if config.has_option('Main', key):
                agentConfig[key] = config.get('Main', key)
            else:
                agentConfig[key] = value

        # Create app:xxx tags based on monitored apps
        agentConfig['create_dd_check_tags'] = config.has_option('Main', 'create_dd_check_tags') and \
            _is_affirmative(config.get('Main', 'create_dd_check_tags'))

        # Forwarding to external statsd server
        if config.has_option('Main', 'statsd_forward_host'):
            agentConfig['statsd_forward_host'] = config.get(
                'Main', 'statsd_forward_host')
            if config.has_option('Main', 'statsd_forward_port'):
                agentConfig['statsd_forward_port'] = int(
                    config.get('Main', 'statsd_forward_port'))

        # optionally send dogstatsd data directly to the agent.
        if config.has_option('Main', 'dogstatsd_use_ddurl'):
            if _is_affirmative(config.get('Main', 'dogstatsd_use_ddurl')):
                agentConfig['dogstatsd_target'] = agentConfig['sd_url']

        # Optional config
        # FIXME not the prettiest code ever...
        if config.has_option('Main', 'use_mount'):
            agentConfig['use_mount'] = _is_affirmative(
                config.get('Main', 'use_mount'))

        if options is not None and options.autorestart:
            agentConfig['autorestart'] = True
        elif config.has_option('Main', 'autorestart'):
            agentConfig['autorestart'] = _is_affirmative(
                config.get('Main', 'autorestart'))

        if config.has_option('Main', 'check_timings'):
            agentConfig['check_timings'] = _is_affirmative(
                config.get('Main', 'check_timings'))

        if config.has_option('Main', 'exclude_process_args'):
            agentConfig['exclude_process_args'] = _is_affirmative(
                config.get('Main', 'exclude_process_args'))

        try:
            filter_device_re = config.get('Main', 'device_blacklist_re')
            agentConfig['device_blacklist_re'] = re.compile(filter_device_re)
        except ConfigParser.NoOptionError:
            pass

        if config.has_option("Main", "nagios_perf_cfg"):
            agentConfig["nagios_perf_cfg"] = config.get(
                "Main", "nagios_perf_cfg")

        if config.has_option("Main", "use_curl_http_client"):
            agentConfig["use_curl_http_client"] = _is_affirmative(
                config.get("Main", "use_curl_http_client"))
        else:
            # Default to False as there are some issues with the curl client and ELB
            agentConfig["use_curl_http_client"] = False

        if config.has_section('WMI'):
            agentConfig['WMI'] = {}
            for key, value in config.items('WMI'):
                agentConfig['WMI'][key] = value

        if (config.has_option("Main", "limit_memory_consumption") and
                config.get("Main", "limit_memory_consumption") is not None):
            agentConfig["limit_memory_consumption"] = int(
                config.get("Main", "limit_memory_consumption"))
        else:
            agentConfig["limit_memory_consumption"] = None

        if config.has_option("Main", "skip_ssl_validation"):
            agentConfig["skip_ssl_validation"] = _is_affirmative(
                config.get("Main", "skip_ssl_validation"))

        agentConfig["collect_instance_metadata"] = True
        if config.has_option("Main", "collect_instance_metadata"):
            agentConfig["collect_instance_metadata"] = _is_affirmative(
                config.get("Main", "collect_instance_metadata"))

        agentConfig["proxy_forbid_method_switch"] = False
        if config.has_option("Main", "proxy_forbid_method_switch"):
            agentConfig["proxy_forbid_method_switch"] = _is_affirmative(
                config.get("Main", "proxy_forbid_method_switch"))

        agentConfig["collect_ec2_tags"] = False
        if config.has_option("Main", "collect_ec2_tags"):
            agentConfig["collect_ec2_tags"] = _is_affirmative(
                config.get("Main", "collect_ec2_tags"))

        agentConfig["utf8_decoding"] = False
        if config.has_option("Main", "utf8_decoding"):
            agentConfig["utf8_decoding"] = _is_affirmative(
                config.get("Main", "utf8_decoding"))

        agentConfig["gce_updated_hostname"] = False
        if config.has_option("Main", "gce_updated_hostname"):
            agentConfig["gce_updated_hostname"] = _is_affirmative(
                config.get("Main", "gce_updated_hostname"))

    except ConfigParser.NoSectionError as e:
        sys.stderr.write('Config file not found or incorrectly formatted.\n')
        sys.exit(2)
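
The sd_url resolution above layers four sources: the forwarder (when options.use_forwarder is set), an sd_url passed on the command line (unless options.disable_sd), an sd_url option in [Main], and finally the sd_account subdomain. A simplified restatement of that precedence follows; this helper is not part of the agent, and the account name is a placeholder:

def resolve_sd_url(use_forwarder, cli_sd_url, cfg_sd_url, sd_account,
                   bind_host='localhost', listen_port=17124):
    if use_forwarder:
        url = 'http://%s:%s' % (bind_host, listen_port)
    elif cli_sd_url:
        url = cli_sd_url
    elif cfg_sd_url:
        url = cfg_sd_url
    else:
        url = 'https://' + sd_account + '.agent.serverdensity.io'
    # strip at most one trailing slash, matching the snippet above
    return url[:-1] if url.endswith('/') else url

print(resolve_sd_url(False, None, None, 'exampleaccount'))
# https://exampleaccount.agent.serverdensity.io
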
Example no. 57
def get_logging_config(cfg_path=None):
    system_os = get_os()
    logging_config = {
        'log_level': None,
        'log_to_event_viewer': False,
        'log_to_syslog': False,
        'syslog_host': None,
        'syslog_port': None,
    }
    if system_os == 'windows':
        logging_config['windows_collector_log_file'] = os.path.join(
            _windows_commondata_path(), 'ServerDensity', 'logs',
            'collector.log')
        logging_config['windows_forwarder_log_file'] = os.path.join(
            _windows_commondata_path(), 'ServerDensity', 'logs',
            'forwarder.log')
        logging_config['windows_dogstatsd_log_file'] = os.path.join(
            _windows_commondata_path(), 'ServerDensity', 'logs',
            'dogstatsd.log')
        logging_config['jmxfetch_log_file'] = os.path.join(
            _windows_commondata_path(), 'ServerDensity', 'logs',
            'jmxfetch.log')
    else:
        logging_config[
            'collector_log_file'] = '/var/log/sd-agent/collector.log'
        logging_config[
            'forwarder_log_file'] = '/var/log/sd-agent/forwarder.log'
        logging_config[
            'dogstatsd_log_file'] = '/var/log/sd-agent/dogstatsd.log'
        logging_config['jmxfetch_log_file'] = '/var/log/sd-agent/jmxfetch.log'
        logging_config['go-metro_log_file'] = '/var/log/sd-agent/go-metro.log'
        logging_config['log_to_syslog'] = True

    config_path = get_config_path(cfg_path, os_name=system_os)
    config = ConfigParser.ConfigParser()
    config.readfp(skip_leading_wsp(open(config_path)))

    if config.has_section('handlers') or config.has_section(
            'loggers') or config.has_section('formatters'):
        if system_os == 'windows':
            config_example_file = "https://github.com/serverdensity/sd-agent/blob/master/packaging/sd-agent/win32/install_files/config_win32.conf"
        else:
            config_example_file = "https://github.com/serverdensity/sd-agent/blob/master/config.cfg.example"

        sys.stderr.write(
            """Python logging config is no longer supported and will be ignored.
            To configure logging, update the logging portion of 'config.cfg' to match:
             '%s'.
             """ % config_example_file)

    for option in logging_config:
        if config.has_option('Main', option):
            logging_config[option] = config.get('Main', option)

    levels = {
        'CRITICAL': logging.CRITICAL,
        'DEBUG': logging.DEBUG,
        'ERROR': logging.ERROR,
        'FATAL': logging.FATAL,
        'INFO': logging.INFO,
        'WARN': logging.WARN,
        'WARNING': logging.WARNING,
    }
    if config.has_option('Main', 'log_level'):
        logging_config['log_level'] = levels.get(
            config.get('Main', 'log_level'))

    if config.has_option('Main', 'log_to_syslog'):
        logging_config['log_to_syslog'] = config.get(
            'Main', 'log_to_syslog').strip().lower() in ['yes', 'true', 1]

    if config.has_option('Main', 'log_to_event_viewer'):
        logging_config['log_to_event_viewer'] = config.get(
            'Main',
            'log_to_event_viewer').strip().lower() in ['yes', 'true', 1]

    if config.has_option('Main', 'syslog_host'):
        host = config.get('Main', 'syslog_host').strip()
        if host:
            logging_config['syslog_host'] = host
        else:
            logging_config['syslog_host'] = None

    if config.has_option('Main', 'syslog_port'):
        port = config.get('Main', 'syslog_port').strip()
        try:
            logging_config['syslog_port'] = int(port)
        except Exception:
            logging_config['syslog_port'] = None

    if config.has_option('Main', 'disable_file_logging'):
        logging_config['disable_file_logging'] = config.get(
            'Main',
            'disable_file_logging').strip().lower() in ['yes', 'true', 1]
    else:
        logging_config['disable_file_logging'] = False

    return logging_config
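
get_logging_config() only assembles a dict; applying it is left to the caller. Below is a sketch of how such a dict could be wired into the standard logging module; this wiring is an assumption for illustration, not the agent's actual bootstrap code:

import logging
import logging.handlers

def apply_logging_config(logging_config):
    root = logging.getLogger()
    root.setLevel(logging_config.get('log_level') or logging.INFO)
    if (not logging_config.get('disable_file_logging')
            and logging_config.get('collector_log_file')):
        root.addHandler(
            logging.FileHandler(logging_config['collector_log_file']))
    if logging_config.get('log_to_syslog'):
        # fall back to local UDP syslog when no host/port is configured
        address = (logging_config.get('syslog_host') or 'localhost',
                   logging_config.get('syslog_port') or 514)
        root.addHandler(logging.handlers.SysLogHandler(address=address))
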
Example no. 58
def main():

    global Topics, TIME_SYNC_UPDATE_MSEC_TOPIC, TIME_SYNC_UPDATE_TOPIC, DontPublish

    parser = argparse.ArgumentParser(
        description='Respond to Time Sync request messages with current time.')
    parser.add_argument(
        "-t",
        "--topic",
        dest="topic",
        action="append",
        help=
        "MQTT topic to which to subscribe.  May be specified multiple times.")
    parser.add_argument("-o",
                        "--host",
                        dest="MqttHost",
                        action="store",
                        help="MQTT host",
                        default=None)
    parser.add_argument("-p",
                        "--port",
                        dest="MqttPort",
                        action="store",
                        help="MQTT host port",
                        type=int,
                        default=None)
    parser.add_argument("-P",
                        "--DontPublish",
                        dest="dontpub",
                        action="store_true",
                        help="Do not actually publish time syncs.",
                        default=False)
    args = parser.parse_args()

    config = configparser.ConfigParser(
        interpolation=configparser.ExtendedInterpolation())
    configFile = GetConfigFilePath()

    config.read(configFile)
    cfgSection = os.path.basename(sys.argv[0]) + "/" + os.environ['HOST']
    logger.info("INI file cofig section is: %s", cfgSection)
    if not config.has_section(cfgSection):
        logger.critical('Config file "%s", has no section "%s".', configFile,
                        cfgSection)
        sys.exit(2)
    if set(config.options(cfgSection)) < RequiredConfigParams:
        logger.critical(
            'Config section "%s" does not have all required params: "%s", it has params: "%s".',
            cfgSection, RequiredConfigParams, set(config.options(cfgSection)))
        sys.exit(3)

    logger.info("INI file cofig section is: %s", cfgSection)

    cfg = config[cfgSection]
    mqtt_host = cfg['mqtt_host']
    mqtt_port = cfg['mqtt_port']
    TIME_SYNC_UPDATE_MSEC_TOPIC = cfg['mqtt_sync_ms_topic']
    TIME_SYNC_UPDATE_TOPIC = cfg['mqtt_sync_topic']

    if config.has_option(cfgSection, 'mqtt_request_topics'):
        t = cfg['mqtt_request_topics']
        logger.debug('Topics from config is: "%s".', t)
        t = t.split()
        logger.debug('Split topics from config is: "%s".', t)
        Topics = t
        logger.debug('Topics from config is: "%s".', Topics)

    if args.topic is not None and len(args.topic) > 0:
        Topics = args.topic
    if args.MqttHost is not None and len(args.MqttHost) > 0:
        mqtt_host = args.MqttHost
    if args.MqttPort is not None:  # MqttPort is already an int; len() would fail here
        mqtt_port = args.MqttPort
    if args.dontpub is not None:
        DontPublish = args.dontpub
    mqtt_port = int(mqtt_port)
    if (mqtt_host is None) or (mqtt_port is None) or (len(Topics) == 0):
        logger.critical(
            'No mqtt_host OR no mqtt_port OR no topics; must quit.')
        sys.exit(1)

    try:
        logger.debug(
            'Connecting to MQTT server at "%s", on port "%s", for topics "%s".',
            mqtt_host, mqtt_port, Topics)
        RecClient.connect(mqtt_host, mqtt_port, 60)
        logger.debug('Mqtt connect returned.')
        logger.debug("Loop immediately after connect returns code: %s",
                     mqtt.error_string(RecClient.loop()))
        if (RecClient._state != mqtt.mqtt_cs_connected):
            logger.debug("Not immediately connected.")
    except Exception as e:
        logger.exception(e)
        sys.exit(2)

    try:
        nowTime = time.time()
        localtime = time.localtime(nowTime)
        timeUs = (nowTime - int(nowTime)) * 1000000
        logger.debug("Begin receive loop at: %s.%06d %s" %
                     (time.strftime("%Y-%m-%d %H:%M:%S", localtime), timeUs,
                      time.strftime("%Z", localtime)))
        RecClient.loop_forever()
    finally:
        logger.debug('Executing finally clause.')
        RecClient.disconnect()
        pass
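
main() above keys its settings on a section named after the running script plus the HOST environment variable, so one file can serve several hosts. A sketch that writes such a section with the option names the code reads; the broker address and topic strings are placeholders, the exact contents of RequiredConfigParams are defined elsewhere, and the HOST fallback below is only for illustration (the script itself requires HOST to be set):

import configparser
import os
import sys

cfg = configparser.ConfigParser(
    interpolation=configparser.ExtendedInterpolation())
section = os.path.basename(sys.argv[0]) + "/" + os.environ.get('HOST', 'myhost')
cfg[section] = {
    'mqtt_host': 'broker.local',
    'mqtt_port': '1883',
    'mqtt_sync_ms_topic': 'time/sync/ms',
    'mqtt_sync_topic': 'time/sync',
    'mqtt_request_topics': 'time/request clock/request',
}
with open('timesync.ini', 'w') as fh:    # illustrative file name
    cfg.write(fh)
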
Example no. 59
def get_config(parse_args=True, cfg_path=None, options=None):
    if parse_args:
        options, _ = get_parsed_args()

    # General config
    agentConfig = {
        'check_freq': DEFAULT_CHECK_FREQUENCY,
        'dogstatsd_port': 8125,
        'dogstatsd_target': 'http://localhost:17123',
        'graphite_listen_port': None,
        'hostname': None,
        'listen_port': None,
        'tags': None,
        'use_ec2_instance_id': False,  # DEPRECATED
        'version': get_version(),
        'watchdog': True,
        'additional_checksd': '/etc/dd-agent/checks.d/',
        'bind_host': get_default_bind_host(),
        'statsd_metric_namespace': None,
        'utf8_decoding': False
    }

    if Platform.is_mac():
        agentConfig['additional_checksd'] = '/opt/datadog-agent/etc/checks.d'

    # Config handling
    try:
        # Find the right config file
        path = os.path.realpath(__file__)
        path = os.path.dirname(path)

        config_path = get_config_path(cfg_path, os_name=get_os())
        config = ConfigParser.ConfigParser()
        config.readfp(skip_leading_wsp(open(config_path)))

        # bulk import
        for option in config.options('Main'):
            agentConfig[option] = config.get('Main', option)

        # Store developer mode setting in the agentConfig
        if config.has_option('Main', 'developer_mode'):
            agentConfig['developer_mode'] = _is_affirmative(
                config.get('Main', 'developer_mode'))

        # Allow an override with the --profile option
        if options is not None and options.profile:
            agentConfig['developer_mode'] = True

        #
        # Core config
        #
        if not config.has_option('Main', 'api_key'):
            log.warning(u"No API key was found. Aborting.")
            sys.exit(2)

        if not config.has_option('Main', 'dd_url'):
            log.warning(u"No dd_url was found. Aborting.")
            sys.exit(2)

        # Endpoints
        dd_url = clean_dd_url(config.get('Main', 'dd_url'))
        api_key = config.get('Main', 'api_key').strip()

        # For collector and dogstatsd
        agentConfig['api_key'] = api_key
        agentConfig['dd_url'] = dd_url

        # multiple endpoints
        if config.has_option('Main', 'other_dd_urls'):
            other_dd_urls = map(clean_dd_url,
                                config.get('Main', 'other_dd_urls').split(','))
        else:
            other_dd_urls = []
        if config.has_option('Main', 'other_api_keys'):
            other_api_keys = map(
                lambda x: x.strip(),
                config.get('Main', 'other_api_keys').split(','))
        else:
            other_api_keys = []

        # Forwarder endpoints logic
        # endpoints is:
        # {
        #    'https://app.datadoghq.com': ['api_key_abc', 'api_key_def'],
        #    'https://app.example.com': ['api_key_xyz']
        # }
        endpoints = {dd_url: [api_key]}
        if len(other_dd_urls) == 0:
            endpoints[dd_url] += other_api_keys
        else:
            assert len(other_dd_urls) == len(
                other_api_keys), 'Please provide one api_key for each url'
            for i, other_dd_url in enumerate(other_dd_urls):
                endpoints[other_dd_url] = endpoints.get(
                    other_dd_url, []) + [other_api_keys[i]]

        agentConfig['endpoints'] = endpoints

        # Forwarder or not forwarder
        agentConfig[
            'use_forwarder'] = options is not None and options.use_forwarder
        if agentConfig['use_forwarder']:
            listen_port = 17123
            if config.has_option('Main', 'listen_port'):
                listen_port = int(config.get('Main', 'listen_port'))
            agentConfig['dd_url'] = "http://{}:{}".format(
                agentConfig['bind_host'], listen_port)
        # FIXME: Legacy dd_url command line switch
        elif options is not None and options.dd_url is not None:
            agentConfig['dd_url'] = options.dd_url

        # Forwarder timeout
        agentConfig['forwarder_timeout'] = 20
        if config.has_option('Main', 'forwarder_timeout'):
            agentConfig['forwarder_timeout'] = int(
                config.get('Main', 'forwarder_timeout'))

        # Extra checks.d path
        # the linux directory is set by default
        if config.has_option('Main', 'additional_checksd'):
            agentConfig['additional_checksd'] = config.get(
                'Main', 'additional_checksd')
        elif get_os() == 'windows':
            # default windows location
            common_path = _windows_commondata_path()
            agentConfig['additional_checksd'] = os.path.join(
                common_path, 'Datadog', 'checks.d')

        if config.has_option('Main', 'use_dogstatsd'):
            agentConfig['use_dogstatsd'] = config.get(
                'Main', 'use_dogstatsd').lower() in ("yes", "true")
        else:
            agentConfig['use_dogstatsd'] = True

        # Service discovery
        if config.has_option('Main', 'service_discovery_backend'):
            try:
                additional_config = extract_agent_config(config)
                agentConfig.update(additional_config)
            except Exception:
                log.error('Failed to load the agent configuration related to '
                          'service discovery. It will not be used.')

        # Concerns only Windows
        if config.has_option('Main', 'use_web_info_page'):
            agentConfig['use_web_info_page'] = config.get(
                'Main', 'use_web_info_page').lower() in ("yes", "true")
        else:
            agentConfig['use_web_info_page'] = True

        # local traffic only? Default to no
        agentConfig['non_local_traffic'] = False
        if config.has_option('Main', 'non_local_traffic'):
            agentConfig['non_local_traffic'] = config.get(
                'Main', 'non_local_traffic').lower() in ("yes", "true")

        # DEPRECATED
        if config.has_option('Main', 'use_ec2_instance_id'):
            use_ec2_instance_id = config.get('Main', 'use_ec2_instance_id')
            # translate yes into True, the rest into False
            agentConfig['use_ec2_instance_id'] = (
                use_ec2_instance_id.lower() == 'yes')

        if config.has_option('Main', 'check_freq'):
            try:
                agentConfig['check_freq'] = int(
                    config.get('Main', 'check_freq'))
            except Exception:
                pass

        # Custom histogram aggregate/percentile metrics
        if config.has_option('Main', 'histogram_aggregates'):
            agentConfig['histogram_aggregates'] = get_histogram_aggregates(
                config.get('Main', 'histogram_aggregates'))

        if config.has_option('Main', 'histogram_percentiles'):
            agentConfig['histogram_percentiles'] = get_histogram_percentiles(
                config.get('Main', 'histogram_percentiles'))

        # Disable Watchdog (optionally)
        if config.has_option('Main', 'watchdog'):
            if config.get('Main', 'watchdog').lower() in ('no', 'false'):
                agentConfig['watchdog'] = False

        # Optional graphite listener
        if config.has_option('Main', 'graphite_listen_port'):
            agentConfig['graphite_listen_port'] = \
                int(config.get('Main', 'graphite_listen_port'))
        else:
            agentConfig['graphite_listen_port'] = None

        # Dogstatsd config
        dogstatsd_defaults = {
            'dogstatsd_port': 8125,
            'dogstatsd_target':
            'http://' + agentConfig['bind_host'] + ':17123',
        }
        for key, value in dogstatsd_defaults.iteritems():
            if config.has_option('Main', key):
                agentConfig[key] = config.get('Main', key)
            else:
                agentConfig[key] = value

        # Create app:xxx tags based on monitored apps
        agentConfig['create_dd_check_tags'] = config.has_option('Main', 'create_dd_check_tags') and \
            _is_affirmative(config.get('Main', 'create_dd_check_tags'))

        # Forwarding to external statsd server
        if config.has_option('Main', 'statsd_forward_host'):
            agentConfig['statsd_forward_host'] = config.get(
                'Main', 'statsd_forward_host')
            if config.has_option('Main', 'statsd_forward_port'):
                agentConfig['statsd_forward_port'] = int(
                    config.get('Main', 'statsd_forward_port'))

        # optionally send dogstatsd data directly to the agent.
        if config.has_option('Main', 'dogstatsd_use_ddurl'):
            if _is_affirmative(config.get('Main', 'dogstatsd_use_ddurl')):
                agentConfig['dogstatsd_target'] = agentConfig['dd_url']

        # Optional config
        # FIXME not the prettiest code ever...
        if config.has_option('Main', 'use_mount'):
            agentConfig['use_mount'] = _is_affirmative(
                config.get('Main', 'use_mount'))

        if options is not None and options.autorestart:
            agentConfig['autorestart'] = True
        elif config.has_option('Main', 'autorestart'):
            agentConfig['autorestart'] = _is_affirmative(
                config.get('Main', 'autorestart'))

        if config.has_option('Main', 'check_timings'):
            agentConfig['check_timings'] = _is_affirmative(
                config.get('Main', 'check_timings'))

        if config.has_option('Main', 'exclude_process_args'):
            agentConfig['exclude_process_args'] = _is_affirmative(
                config.get('Main', 'exclude_process_args'))

        try:
            filter_device_re = config.get('Main', 'device_blacklist_re')
            agentConfig['device_blacklist_re'] = re.compile(filter_device_re)
        except ConfigParser.NoOptionError:
            pass

        if config.has_option('datadog', 'ddforwarder_log'):
            agentConfig['has_datadog'] = True

        # Dogstream config
        if config.has_option("Main", "dogstream_log"):
            # Older version, single log support
            log_path = config.get("Main", "dogstream_log")
            if config.has_option("Main", "dogstream_line_parser"):
                agentConfig["dogstreams"] = ':'.join(
                    [log_path,
                     config.get("Main", "dogstream_line_parser")])
            else:
                agentConfig["dogstreams"] = log_path

        elif config.has_option("Main", "dogstreams"):
            agentConfig["dogstreams"] = config.get("Main", "dogstreams")

        if config.has_option("Main", "nagios_perf_cfg"):
            agentConfig["nagios_perf_cfg"] = config.get(
                "Main", "nagios_perf_cfg")

        if config.has_option("Main", "use_curl_http_client"):
            agentConfig["use_curl_http_client"] = _is_affirmative(
                config.get("Main", "use_curl_http_client"))
        else:
            # Default to False as there are some issues with the curl client and ELB
            agentConfig["use_curl_http_client"] = False

        if config.has_section('WMI'):
            agentConfig['WMI'] = {}
            for key, value in config.items('WMI'):
                agentConfig['WMI'][key] = value

        if (config.has_option("Main", "limit_memory_consumption") and
                config.get("Main", "limit_memory_consumption") is not None):
            agentConfig["limit_memory_consumption"] = int(
                config.get("Main", "limit_memory_consumption"))
        else:
            agentConfig["limit_memory_consumption"] = None

        if config.has_option("Main", "skip_ssl_validation"):
            agentConfig["skip_ssl_validation"] = _is_affirmative(
                config.get("Main", "skip_ssl_validation"))

        agentConfig["collect_instance_metadata"] = True
        if config.has_option("Main", "collect_instance_metadata"):
            agentConfig["collect_instance_metadata"] = _is_affirmative(
                config.get("Main", "collect_instance_metadata"))

        agentConfig["proxy_forbid_method_switch"] = False
        if config.has_option("Main", "proxy_forbid_method_switch"):
            agentConfig["proxy_forbid_method_switch"] = _is_affirmative(
                config.get("Main", "proxy_forbid_method_switch"))

        agentConfig["collect_ec2_tags"] = False
        if config.has_option("Main", "collect_ec2_tags"):
            agentConfig["collect_ec2_tags"] = _is_affirmative(
                config.get("Main", "collect_ec2_tags"))

        agentConfig["utf8_decoding"] = False
        if config.has_option("Main", "utf8_decoding"):
            agentConfig["utf8_decoding"] = _is_affirmative(
                config.get("Main", "utf8_decoding"))

        agentConfig["gce_updated_hostname"] = False
        if config.has_option("Main", "gce_updated_hostname"):
            agentConfig["gce_updated_hostname"] = _is_affirmative(
                config.get("Main", "gce_updated_hostname"))

    except ConfigParser.NoSectionError as e:
        sys.stderr.write('Config file not found or incorrectly formatted.\n')
        sys.exit(2)

    except ConfigParser.ParsingError as e:
        sys.stderr.write('Config file not found or incorrectly formatted.\n')
        sys.exit(2)

    except ConfigParser.NoOptionError as e:
        sys.stderr.write(
            'There are some items missing from your config file, but nothing fatal [%s]'
            % e)

    # Storing proxy settings in the agentConfig
    agentConfig['proxy_settings'] = get_proxy(agentConfig)
    if agentConfig.get('ca_certs', None) is None:
        agentConfig['ssl_certificate'] = get_ssl_certificate(
            get_os(), 'datadog-cert.pem')
    else:
        agentConfig['ssl_certificate'] = agentConfig['ca_certs']

    return agentConfig
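
The endpoints mapping built above pairs each URL with the list of API keys to forward to it, exactly as the inline comment sketches. A simplified restatement follows (not the agent's own helper), with placeholder keys and the same one-key-per-extra-url constraint:

def build_endpoints(dd_url, api_key, other_dd_urls=(), other_api_keys=()):
    endpoints = {dd_url: [api_key]}
    if not other_dd_urls:
        endpoints[dd_url] += list(other_api_keys)
    else:
        assert len(other_dd_urls) == len(other_api_keys), \
            'Please provide one api_key for each url'
        for url, key in zip(other_dd_urls, other_api_keys):
            endpoints.setdefault(url, []).append(key)
    return endpoints

print(build_endpoints('https://app.datadoghq.com', 'key_abc',
                      ['https://app.example.com'], ['key_xyz']))
# {'https://app.datadoghq.com': ['key_abc'],
#  'https://app.example.com': ['key_xyz']}
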