Example #1
def set_config(options, verbosity=0, database=None):
    """Read configuration options from file, merge them with command line
    options.

    :Parameters:
        - 'options': command line options.
        - 'verbosity': verbosity level, int.
        - 'database': output database name.

    :Return:
        - 'verbosity': verbosity level, int.
        - 'database': output database name, string.
    """
    try:
        if options.config:
            config = ConfigParser.ConfigParser()
            config.read(options.config)
            verbosity = config.get('task1', 'verbosity')
            database = config.get('base', 'database')

        if options.verbose:
            verbosity = int(options.verbose)
        if options.database:
            database = options.database

        return verbosity, database

    except Exception as err:  # Exception already subsumes ConfigParser.Error
        raise MyError('Configuration error: %s' % err)
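A hedged usage sketch: set_config only needs an object with config, verbose, and database attributes, and assumes module-level ConfigParser and MyError definitions. The Options class below is a hypothetical stand-in for parsed command-line options.

class Options(object):
    """Hypothetical stand-in for parsed command-line options."""
    config = None        # or a path to an INI file with [task1] and [base] sections
    verbose = '2'
    database = 'out.db'

verbosity, database = set_config(Options())  # -> (2, 'out.db')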
Example #2
def connect_ia(ia_access_key_id=None, ia_secret_access_key=None, is_secure=False, **kwargs):
    """
    Connect to the Internet Archive via their S3-like API.

    :type ia_access_key_id: string
    :param ia_access_key_id: Your IA Access Key ID.  This will also look
        in your boto config file for an entry in the Credentials
        section called "ia_access_key_id"

    :type ia_secret_access_key: string
    :param ia_secret_access_key: Your IA Secret Access Key.  This will also
        look in your boto config file for an entry in the Credentials
        section called "ia_secret_access_key"

    :rtype: :class:`boto.s3.connection.S3Connection`
    :return: A connection to the Internet Archive
    """
    from boto.s3.connection import S3Connection
    from boto.s3.connection import OrdinaryCallingFormat

    access_key = config.get("Credentials", "ia_access_key_id", ia_access_key_id)
    secret_key = config.get("Credentials", "ia_secret_access_key", ia_secret_access_key)

    return S3Connection(
        access_key,
        secret_key,
        host="s3.us.archive.org",
        calling_format=OrdinaryCallingFormat(),
        is_secure=is_secure,
        **kwargs
    )
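A short usage sketch for connect_ia, with a made-up item name; get_bucket and Bucket.list are standard boto S3 calls, and credentials fall back to the Credentials section of ~/.boto when not passed in.

conn = connect_ia()
bucket = conn.get_bucket('my-ia-item')  # hypothetical Internet Archive item
for key in bucket.list():
    print(key.name)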
Example #3
    def register_schedule(self):
        channel = config.get('irc', 'channel')

        for schedule in config.getlist('irc', 'schedules'):
            sect = ':'.join(('schedule', schedule))

            # Do not evaluate isenabled() here: if we did, an action that
            # is currently disabled would never be scheduled at all.
            if not config.has_section(sect):
                logging.error('[schedule] [%s] no such schedule', sect)
                continue

            if not config.has_option(sect, 'action'):
                logging.error('[schedule] [%s] no action specified', sect)
                continue

            action = ':'.join(('action', config.get(sect, 'action')))
            if not config.has_section(action):
                logging.error('[schedule] [%s] invalid action specified', sect)
                continue

            interval = config.getint(sect, 'interval')
            if interval < 60:
                logging.error('[schedule] [%s] interval too short', sect)
                continue

            self.reactor.register_schedule(interval, self.do_action,
                                           action, self.connection,
                                           None, {'target': channel}, sect)
            logging.info('[schedule] [%s] registered', sect)
Example #4
def main():
    # Set default configuration
    port = '8069'
    host = 'localhost'
    path = expanduser("~") + '/.proxypos/config/proxypos.yaml'

    # Read configuration file and init config handler
    if os.path.exists(path):
        with open(path, 'r') as configfile:
            config.import_config(configfile.read())
    else:
        raise Exception('Please create a default configuration file!')

    # Start log file
    logging.config.dictConfig(config.get('logs'))

    # Interactive mode
    logger = logging.getLogger(__name__)
    logger.info("ProxyPos server starting up...")
    logger.info("Listening on http://%s:%s/" % (config.get('app.host') or host,
                                                config.get('app.port') or port))
    run(app,
        host=config.get('app.host') or host,
        port=config.get('app.port') or port,
        quiet=True)
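A minimal proxypos.yaml sketch that would satisfy this loader, assuming the config handler resolves dotted lookups like 'app.host' against nested keys (an assumption; the keys are inferred from the calls above, and 'logs' must hold a valid logging dictConfig mapping):

app:
  host: 0.0.0.0
  port: 8069
logs:
  version: 1
  handlers:
    console: {class: logging.StreamHandler}
  root: {level: INFO, handlers: [console]}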
Example #5
    def isenabled(sect, data={}):
        if not config.has_section(sect):
            return False

        if not config.getboolean(sect, 'enable'):
            return False

        try:
            per = config.get(sect, 'percentage')             # allow '0'
            if per and int(per) < random.randint(1, 100):
                return False

            time_ = config.get(sect, 'time')
            if time_ and not util.time_in(time_):
                return False

            if 'source' in data:
                pattern = config.get(sect, 'source_pattern')
                data['source_match'] = re.search(pattern, data['source'])
                if not data['source_match']:
                    return False

            if 'message' in data:
                pattern = config.get(sect, 'pattern')
                data['match'] = re.search(pattern, data['message'])
                if not data['match']:
                    return False
        except:
            logging.exception('[%s] %s', sect, data)
            return False

        return True
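The predicate reads everything from a single INI section; a hypothetical section it would accept (option names are taken from the config.get calls above, values are illustrative and the time format depends on what util.time_in expects):

[action:greet]
enable = true
percentage = 50
time = 09:00-18:00
source_pattern = ^alice!
pattern = ^hello\b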
Example #6
def configure_logging( config ):
    """
    Allow some basic logging configuration to be read from the cherrypy
    config.
    """
    format = config.get( "log_format", "%(name)s %(levelname)s %(asctime)s %(message)s" )
    level = logging._levelNames[ config.get( "log_level", "DEBUG" ) ]
    destination = config.get( "log_destination", "stdout" )
    log.info( "Logging at '%s' level to '%s'" % ( level, destination ) )
    # Get root logger
    root = logging.getLogger()
    # Set level
    root.setLevel( level )
    # Remove old handlers
    for h in root.handlers[:]: 
        root.removeHandler(h)
    # Create handler
    if destination == "stdout":
        handler = logging.StreamHandler( sys.stdout )
    else:
        handler = logging.FileHandler( destination )
    # Create formatter
    formatter = logging.Formatter( format )    
    # Hook everything up
    handler.setFormatter( formatter )
    root.addHandler( handler )
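Since only config.get(key, default) is called, a plain dict can stand in for the config object in a quick test; note that logging._levelNames is a Python 2 internal (Python 3 code would use logging.getLevelName or an explicit name-to-level mapping), and the function assumes module-level log and sys names.

import logging
import sys

log = logging.getLogger(__name__)

configure_logging({
    "log_level": "INFO",
    "log_destination": "stdout",
})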
Example #7
 def on_welcome(self, conn, event):
     channel = config.get('irc', 'channel')
     key = config.get('irc', 'channel_key')
     conn.join(channel, key)
     logging.info('[welcome] joined <%s>', channel)
     self.unregister_schedule()
     self.register_schedule()
Example #8
def load_config(app):
    ''' Reads the config file and loads configuration properties
    into the Flask app.
    :param app: The Flask app object.
    '''

    # Get the path to the application directory,
    # that's where the config file resides.
    par_dir = os.path.join(__file__, os.pardir)
    par_dir_abs_path = os.path.abspath(par_dir)
    app_dir = os.path.dirname(par_dir_abs_path)

    # Read config file
    # FIXME: Use the "common pattern" described in "Configuring from Files":
    # http://flask.pocoo.org/docs/config/
    config = ConfigParser.RawConfigParser()
    config_filepath = app_dir + '/config.cfg'
    config.read(config_filepath)

    # Set up config properties
    app.config['SERVER_PORT'] = config.get('Application', 'SERVER_PORT')
    app.config['BASE_PATH'] = config.get('Application', 'BASE_PATH')

    app.config['API_ELECTION_RESULTS'] = config.get('Api', 'ELECTION_RESULTS')

    # Logging path might be relative or starts from the root.
    # If it's relative then be sure to prepend the path with
    # the application's root directory path.
    log_path = config.get('Logging', 'PATH')
    if log_path.startswith('/'):
        app.config['LOG_PATH'] = log_path
    else:
        app.config['LOG_PATH'] = app_dir + '/' + log_path

    app.config['LOG_LEVEL'] = config.get('Logging', 'LEVEL').upper()
Example #9
    def __init__(self, **config):
        self.config = utils.Config(dict(self.defaults, **config))
        logging.config.dictConfig(self.logging_config)
        self.log = logging.getLogger('irc3.' + self.nick)
        self.original_nick = self.nick
        if config.get('verbose'):
            logging.getLogger('irc3').setLevel(logging.DEBUG)
        else:
            level = config.get('level')
            if level is not None:
                level = getattr(logging, str(level), level)
                self.log.setLevel(level)
        self.encoding = self.config['encoding']

        self.loop = self.config.loop
        if self.loop is None:
            self.loop = asyncio.get_event_loop()

        self.events_re = []
        self.events = defaultdict(list)

        self.plugins = {}
        self.includes = set()
        self.include(*self.config.get('includes', []))
        self.recompile()
Example #10
    def __init__(self, *ini, **config):
        config['version'] = version
        self.config = utils.Config(dict(self.defaults, *ini, **config))
        logging.config.dictConfig(self.logging_config)
        if self.server:
            self.log = logging.getLogger('irc3d')
        else:
            self.log = logging.getLogger('irc3.' + (self.nick or 'd'))
        self.original_nick = self.nick
        if config.get('verbose') or config.get('debug'):
            logging.getLogger('irc3').setLevel(logging.DEBUG)
            logging.getLogger('irc3d').setLevel(logging.DEBUG)
        else:
            level = config.get('level')
            if level is not None:
                level = getattr(logging, str(level), level)
                self.log.setLevel(level)
        self.encoding = self.config['encoding']

        self.loop = self.config.loop
        if self.loop is None:
            try:
                self.loop = asyncio.get_event_loop()
            except RuntimeError:
                self.loop = asyncio.new_event_loop()
                asyncio.set_event_loop(self.loop)

        # Python 3.4.1 does not have a create_task method; check for it.
        self.create_task = getattr(self.loop, 'create_task', self.create_task)

        self.registry = Registry()

        self.include(*self.config.get('includes', []))
Example #11
def main(options, config):
    monitored_pages = list()
    page_monitors = list()
    if options.web_server:
        launch_web_server(monitored_pages)

    for section in config.sections():
        page = MonitoredPage(section,
                             config.get(section, 'url'),
                             float(config.get(section, 'maximum_response_time')),
                             config.get(section, 'should_contain'))
        monitored_pages.append(page)
        page_monitors.append(monitor_page(page, float(options.frequency)))

    try:
        while True:
            for page in monitored_pages:
                try:
                    printable_status = str(page)
                    if page.get_status():
                        logger.info(printable_status)
                    else:
                        logger.warning(printable_status)
                except NoResponse:
                    logger.error(printable_status)
            time.sleep(options.frequency)
    except KeyboardInterrupt:
        logger.info('Ctrl-C caught, exiting.')
        map(lambda x: x.cancel(), page_monitors)
        map(lambda x: x.join(), page_monitors)
        return 0
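Each INI section describes one page to watch; a hypothetical configuration matching the three config.get calls above:

[homepage]
url = https://example.com/
maximum_response_time = 2.5
should_contain = Welcome

[status]
url = https://example.com/status
maximum_response_time = 1.0
should_contain = OK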
Example #12
def read_sequence_configs(config):
    """[EXTERNAL_DIRS]."""
    CONFIGS['sequence_dir'] = config.get(
        'sequence_dir',
        fallback=op.join(CONFIGS['unique_temp_dir'], 'sequence')
    )
    CONFIGS['provean_temp_dir'] = op.join(CONFIGS['sequence_dir'], 'provean_temp')
    _validate_provean_temp_dir(config, CONFIGS)

    CONFIGS['pdb_dir'] = config.get('pdb_dir')
    CONFIGS['blast_db_dir'] = config.get('blast_db_dir')
    CONFIGS['blast_db_dir_fallback'] = (
        config.get('blast_db_dir_fallback', fallback=''))
    _validate_blast_db_dir(CONFIGS)

    CONFIGS['archive_dir'] = config.get('archive_dir')
    # Supported archive types are 'directory' and '7zip'
    if CONFIGS['archive_dir'] is None:
        CONFIGS['archive_type'] = None
    elif op.splitext(CONFIGS['archive_dir'])[-1] in ['.7z', '.7zip']:
        assert op.isfile(CONFIGS['archive_dir'])
        CONFIGS['archive_type'] = '7zip'
    else:
        assert op.isdir(CONFIGS['archive_dir'])
        CONFIGS['archive_type'] = 'directory'
    CONFIGS['archive_temp_dir'] = op.join(CONFIGS['temp_dir'], 'archive')
Example #13
def configure_logging( config ):
    """
    Allow some basic logging configuration to be read from the cherrypy
    config.
    """
    # PasteScript will have already configured the logger if the appropriate
    # sections were found in the config file, so we do nothing if the
    # config has a loggers section, otherwise we do some simple setup
    # using the 'log_*' values from the config.
    if config.global_conf_parser.has_section( "loggers" ):
        return
    format = config.get( "log_format", "%(name)s %(levelname)s %(asctime)s %(message)s" )
    level = logging._levelNames[ config.get( "log_level", "DEBUG" ) ]
    destination = config.get( "log_destination", "stdout" )
    log.info( "Logging at '%s' level to '%s'" % ( level, destination ) )
    # Get root logger
    root = logging.getLogger()
    # Set level
    root.setLevel( level )
    # Turn down paste httpserver logging
    if level <= logging.DEBUG:
        logging.getLogger( "paste.httpserver.ThreadPool" ).setLevel( logging.WARN )
    # Remove old handlers
    for h in root.handlers[:]: 
        root.removeHandler(h)
    # Create handler
    if destination == "stdout":
        handler = logging.StreamHandler( sys.stdout )
    else:
        handler = logging.FileHandler( destination )
    # Create formatter
    formatter = logging.Formatter( format )    
    # Hook everything up
    handler.setFormatter( formatter )
    root.addHandler( handler )
Example #14
def main():
    args = make_args()
    config = configparser.ConfigParser()
    utils.load_config(config, args.config)
    for cmd in args.modify:
        utils.modify_config(config, cmd)
    with open(os.path.expanduser(os.path.expandvars(args.logging)), 'r') as f:
        logging.config.dictConfig(yaml.load(f))
    model_dir = utils.get_model_dir(config)
    height, width = tuple(map(int, config.get('image', 'size').split()))
    resize = transform.parse_transform(config, config.get('transform', 'resize_test'))
    transform_image = transform.get_transform(config, config.get('transform', 'image_test').split())
    transform_tensor = transform.get_transform(config, config.get('transform', 'tensor').split())
    # load image
    image_bgr = cv2.imread('image.jpg')
    image_resized = resize(image_bgr, height, width)
    image = transform_image(image_resized)
    tensor = transform_tensor(image).unsqueeze(0)
    # Caffe2
    init_net = caffe2_pb2.NetDef()
    with open(os.path.join(model_dir, 'init_net.pb'), 'rb') as f:
        init_net.ParseFromString(f.read())
    predict_net = caffe2_pb2.NetDef()
    with open(os.path.join(model_dir, 'predict_net.pb'), 'rb') as f:
        predict_net.ParseFromString(f.read())
    p = workspace.Predictor(init_net, predict_net)
    results = p.run([tensor.numpy()])
    logging.info(utils.abs_mean(results[0]))
    logging.info(hashlib.md5(results[0].tostring()).hexdigest())
Example #15
 def __init__(self, config):
     self.command = ''
     self.jd_workflow = None
     self.param = []
     self.log_file = config.get("scheduler", "log_file")
     self.log_level = config.get("scheduler", "log_level")
     self.workflow_not_deployed = config.get("scheduler", "workflow_not_deployed")
Example #16
def setup_logging(default_level=logging.WARNING):
    """Setup logging configuration

    """
    log = logging.getLogger(__name__)
    config = ConfigParser.RawConfigParser()
    fullPath = LiotaConfigPath().get_liota_fullpath()
    if fullPath != '':
        try:
            if config.read(fullPath) != []:
                # now use json file for logging settings
                try:
                    log_path = config.get('LOG_PATH', 'log_path')
                    log_cfg = config.get('LOG_CFG', 'json_path')
                except ConfigParser.ParsingError as err:
                    log.error('Could not parse log config file')
            else:
                raise IOError('Cannot open configuration file ' + fullPath)
        except IOError as err:
            log.error('Could not open log config file')
        mkdir_log(log_path)
        if os.path.exists(log_cfg):
            with open(log_cfg, 'rt') as f:
                config = json.load(f)
            logging.config.dictConfig(config)
            log.info('created logger with ' + log_cfg)
        else:
            # missing logging.json file
            logging.basicConfig(level=default_level)
            log.warn(
                'logging.json file missing, created default logger with level = ' +
                str(default_level))
    else:
        # missing config file
        log.warn('liota.conf file missing')
Example #17
def setup_console_logging(config, verbosity_level):
    if verbosity_level < min(LOG_LEVELS.keys()):
        verbosity_level = min(LOG_LEVELS.keys())
    if verbosity_level > max(LOG_LEVELS.keys()):
        verbosity_level = max(LOG_LEVELS.keys())

    loglevels = config.get('loglevels', {})
    has_debug_loglevels = any([
        level < logging.INFO for level in loglevels.values()])

    verbosity_filter = VerbosityFilter(verbosity_level, loglevels)

    if verbosity_level < 1 and not has_debug_loglevels:
        log_format = config['logging']['console_format']
    else:
        log_format = config['logging']['debug_format']
    formatter = logging.Formatter(log_format)

    if config['logging']['color']:
        handler = ColorizingStreamHandler(config.get('logcolors', {}))
    else:
        handler = logging.StreamHandler()
    handler.addFilter(verbosity_filter)
    handler.setFormatter(formatter)

    logging.getLogger('').addHandler(handler)
Example #18
def check_config():
    """
    Check the config file and print results
    """
    result = {'missing': [], 'changed': []}
    for option in DEFAULT_CONFIG['Poezio']:
        value = config.get(option)
        if value != DEFAULT_CONFIG['Poezio'][option]:
            result['changed'].append((option, value, DEFAULT_CONFIG['Poezio'][option]))
        else:
            value = config.get(option, default='')
            upper = value.upper()
            default = str(DEFAULT_CONFIG['Poezio'][option]).upper()
            if upper != default:
                result['missing'].append(option)

    result['changed'].sort(key=lambda x: x[0])
    result['missing'].sort()
    if result['changed']:
        print('\033[1mOptions changed from the default configuration:\033[0m\n')
        for option, new_value, default in result['changed']:
            print('    \033[1m%s\033[0m = \033[33m%s\033[0m (default: \033[32m%s\033[0m)' % (option, new_value, default))

    if result['missing']:
        print('\n\033[1mMissing options:\033[0m (the defaults are used)\n')
        for option in result['missing']:
            print('    \033[31m%s\033[0m' % option)
Example #19
    def _makeConfig(self):
        import datetime

        config = deployUtil.vdsmImport("config").config

        if not os.path.exists(VDSM_CONF):
            logging.debug("makeConfig: generating conf.")
            lines = []
            lines.append("# Auto-generated by vds_bootstrap at:" + str(datetime.datetime.now()) + "\n")
            lines.append("\n")

            lines.append("[vars]\n")  # Adding ts for the coming scripts.
            lines.append("trust_store_path = " + config.get("vars", "trust_store_path") + "\n")
            lines.append("ssl = " + config.get("vars", "ssl") + "\n")

            if config.getboolean("vars", "fake_kvm_support"):
                lines.append("fake_kvm_support = true\n")

            lines.append("\n")

            # Adding mgt port for the coming scripts.
            lines.append("[addresses]\n")
            lines.append("management_port = " + config.get("addresses", "management_port") + "\n")

            logging.debug("makeConfig: writing the following to " + VDSM_CONF)
            logging.debug(lines)
            fd, tmpName = tempfile.mkstemp()
            f = os.fdopen(fd, "w")
            f.writelines(lines)
            f.close()
            os.chmod(tmpName, 0o644)
            shutil.move(tmpName, VDSM_CONF)
        else:
            self.message = "Basic configuration found, skipping this step"
            logging.debug(self.message)
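The generated file (at whatever path VDSM_CONF points to) would look roughly like this; the values are illustrative, since they come from whatever defaults config.get returns:

# Auto-generated by vds_bootstrap at:2015-01-01 12:00:00.000000

[vars]
trust_store_path = /etc/pki/vdsm
ssl = true

[addresses]
management_port = 54321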
Example #20
    def get_window_size(self, tenantId):
        """
        This method retrieves the window size for a tenantId from the cloto database.

        :param tenantId: the id of the tenant whose window size is requested
        :return: the window size
        """
        try:
            if self.conn == None:
                self.conn = mysql.connect(
                    charset=config.get("mysql", "charset"),
                    use_unicode=True,
                    host=config.get("mysql", "host"),
                    user=config.get("mysql", "user"),
                    passwd=config.get("mysql", "password"),
                    db=config.get("mysql", "db"),
                )
            cursor = self.conn.cursor()
            cursor.execute('SELECT * FROM cloto.cloto_tenantinfo WHERE tenantId="%s"' % tenantId)
            data = cursor.fetchall()
            if len(data) == 0:
                raise NotFound('{"error": "TenantID %s not found in cloto database"}' % tenantId)
            else:
                tenant_information = data[0]
                window_size = tenant_information[1]

        except Exception, e:
            logging.error("Error %s" % e.message)
            raise e
Example #21
def main():
    home = os.path.dirname(os.path.abspath(__file__))
    os.chdir(home)

    config = ConfigParser.RawConfigParser()
    config.read('application.conf')
    out_dir = config.get('application', 'out_dir')
    timeout = float(config.get('application', 'timeout'))
    boxes = json.loads(config.get('application', 'boxes_json'))

    logging.config.fileConfig('logging.conf')

    setup_encoding()

    for box in boxes:
        host = box['host']
        username = box['username']
        password = box['password']

        try:
            logging.info('Processing %s', host)
            api = connect(host=host, username=username, password=password, timeout=timeout)
            entries = api(cmd='/ip/arp/print')
            api.close()
            data = set([(entry['mac-address'], entry['interface']) for entry in entries if 'mac-address' in entry])
            file_name = datetime.now().strftime('%Y%m%d%H%M%S') + '-' + host + '.gz'
            file_path = os.path.join(out_dir, file_name)
            with gzip.open(file_path, 'wb') as file:
                for obj in data:
                    file.write(' '.join(obj) + '\n')
            logging.info('%s done', host)
        except:
            _, exc_value, _ = sys.exc_info()
            logging.error(exc_value)
Example #22
def transcode_remote():
    setup_logging()

    config = get_config()
    args   = sys.argv[1:]

    # Check to see if we need to call a user-script to replace/modify the file path
    if config.get("path_script", None):
        idx = 0
        # The file path comes after the "-i" command line argument
        for i, v in enumerate(args):
            if v == "-i":
                idx = i+1
                break

        # Found the requested video path
        path = args[idx]

        try:
            proc = subprocess.Popen([config.get("path_script"), path], stdout=subprocess.PIPE)
            proc.wait()
            new_path = proc.stdout.readline().strip()
            if new_path:
                log.debug("Replacing path with: %s" % new_path)
                args[idx] = new_path
        except Exception, e:
            log.error("Error calling path_script: %s" % str(e))
Example #23
    def enabled(self, sect):
        if not config.has_section(sect):
            return False

        if not config.getboolean(sect, 'enable'):
            return False

        try:
            per = config.get(sect, 'percentage')             # allow '0'
            if per and int(per) < random.randint(1, 100):
                return False

            time_ = config.get(sect, 'time')
            if time_ and not util.time_in(time_):
                return False

            if 'user_name' in self.data:
                pattern = config.get(sect, 'user_pattern')
                self.data['user_match'] = re.search(pattern,
                                                    self.data['user_name'])
                if not self.data['user_match']:
                    return False

            if 'text' in self.data:
                pattern = config.get(sect, 'pattern')
                self.data['match'] = re.search(pattern, self.data['text'])
                if not self.data['match']:
                    return False
        except:
            logging.exception('[%s] %s', sect, self.data)
            return False

        return True
Example #24
def load_config():
    """
    Loads the config files merging the defaults
    with the file defined in environ.LINTREVIEW_SETTINGS if it exists.
    """
    config = Config(os.getcwd())

    if 'LINTREVIEW_SETTINGS' in os.environ:
        config.from_envvar('LINTREVIEW_SETTINGS')
    elif os.path.exists(os.path.join(os.getcwd(), 'settings.py')):
        config.from_pyfile('settings.py')
    else:
        msg = ("Unable to load configuration file. Please "
               "either create ./settings.py or set LINTREVIEW_SETTINGS "
               "in your environment before running.")
        raise ImportError(msg)
    if config.get('LOGGING_CONFIG'):
        logging.config.fileConfig(
            config.get('LOGGING_CONFIG'),
            disable_existing_loggers=False)

    if config.get('SSL_CA_BUNDLE'):
        os.environ['REQUESTS_CA_BUNDLE'] = config.get('SSL_CA_BUNDLE')

    return config
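A hedged usage sketch; the settings path and the WORKSPACE key are hypothetical:

import os

os.environ['LINTREVIEW_SETTINGS'] = '/etc/lintreview/settings.py'  # hypothetical path
config = load_config()
workspace = config.get('WORKSPACE')  # None when the key is absent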
Example #25
def phase_selfcal(field='',vis='',refant='',config=None):
    """
    Self-calibrate phases

    Inputs:
      field       = field to be calibrated
      vis         = measurement set containing field
      config      = ConfigParser object for this project

    Returns:
      None
    """
    #
    # start logger
    #
    logger = logging.getLogger("main")
    #
    # check config
    #
    if config is None:
        logger.critical("Error: Need to supply a config")
        raise ValueError("Config is None")
    #
    # Calculate phase calibration table
    #
    solint = config.get("Self Calibration","solint")
    combine = config.get("Self Calibration","combine")
    logger.info("Calculate phase calibration table with solint={0} and combine={1}".format(solint,combine))
    casa.gaincal(vis=vis,field=field,caltable='{0}_phase_selfcal.cal'.format(field),
                 calmode='p',solint=solint,combine=combine,
                 refant=refant,minsnr=3.0,minblperant=1)
    logger.info("Done.")
Example #26
def load_modules():
    analysis_modules = []
    for section in config:
        if "analysis_module_" in section:
            if not config.getboolean(section, "enabled"):
                continue

            module_name = config.get(section, "module")
            try:
                _module = importlib.import_module(module_name)
            except Exception as e:
                log.error("Unable to import module {0}: {1}".format(module_name, str(e)))
                continue

            class_name = config.get(section, "class")
            try:
                module_class = getattr(_module, class_name)
            except Exception as e:
                log.error("Unable to load module class {0}: {1}".format(module_class, str(e)))
                continue

            try:
                analysis_module = module_class(str(section))
            except Exception as e:
                log.error("Unable to load analysis module {0}: {1}".format(section, str(e)))
                continue

            analysis_modules.append(analysis_module)

    return analysis_modules
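Module sections follow an analysis_module_* naming convention; a hypothetical excerpt showing the three options the loader reads:

[analysis_module_yara]
enabled = yes
module = modules.yara_scanner
class = YaraScanner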
Example #27
def udl_trigger(config, loop_once=False):
    """Runs the watcher script on the udl arrivals zone to schedule pipeline
    when a file is ready

    :param config: Entire udl2_conf as flat dictionary
    :param loop_once: Runs the loop only once if set to True (needed for testing)
    """
    # get the settings needed for the udl trigger alone
    config = get_config_from_ini(config=config, config_prefix='udl2_trigger', delete_prefix=True)
    file_watcher = FileWatcher(config, append_logs_to='edudl2')
    logger.info('Starting UDL2 trigger loop. Looking at directory => {source_dir}'.format(
                source_dir=config.get(Const.SOURCE_DIR)))
    while True:
        try:
            logger.debug('Searching for new files in {source_dir}'.format(source_dir=config.get(Const.SOURCE_DIR)))
            udl_ready_files = _find_udl_ready_files(file_watcher)
            logger.debug('Found {count} files ready to process'.format(count=str(len(udl_ready_files))))
            for file in udl_ready_files:
                logger.debug('Scheduling pipeline for file - {file}'.format(file=file))
                schedule_pipeline.delay(file)
            if loop_once:
                break
        except KeyboardInterrupt:
            logger.warn('UDL2 trigger process terminated by a user')
            os._exit(0)
        except Exception as e:
            logger.error(e)
        finally:
            time.sleep(float(file_watcher.conf.get(Const.FILE_SYSTEM_SCAN_DELAY)))
    logger.warn('Exiting udl trigger process')
Example #28
def reset():
    global bin_paths
    global config
    global configs_found

    bin_paths = {}

    config = ConfigParser.ConfigParser()
    configs_found = config.read(config_files)
    if not configs_found:
        print ("WARNING: pyTivo.conf does not exist.\n" + "Assuming default values.")
        configs_found = config_files[-1:]

    for section in config.sections():
        if section.startswith("_tivo_"):
            tsn = section[6:]
            if tsn.upper() not in ["SD", "HD"]:
                if config.has_option(section, "name"):
                    tivo_names[tsn] = config.get(section, "name")
                else:
                    tivo_names[tsn] = tsn
                if config.has_option(section, "address"):
                    tivos[tsn] = config.get(section, "address")

    for section in ["Server", "_tivo_SD", "_tivo_HD"]:
        if not config.has_section(section):
            config.add_section(section)
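A pyTivo.conf excerpt exercising the per-TiVo sections parsed above (the TSN and address are made up):

[Server]
port = 9032

[_tivo_6520001234567890]
name = Living Room
address = 192.168.1.50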
Example #29
    def client_from_config(self, queue_name, config):
        """
        Return an XQueueClient from the configuration object.
        """
        from . import client

        klass = getattr(client, config.get('CLASS', 'XQueueClientThread'))
        watcher = klass(queue_name,
                        xqueue_server=config.get('SERVER', 'http://localhost:18040'),
                        xqueue_auth=config.get('AUTH', (None, None)),
                        http_basic_auth=self.http_basic_auth)

        for handler_config in config.get('HANDLERS', []):
            handler_name = handler_config['HANDLER']
            mod_name, classname = handler_name.rsplit('.', 1)
            module = importlib.import_module(mod_name)

            kw = handler_config.get('KWARGS', {})

            # codejail configuration per handler
            codejail_config = handler_config.get("CODEJAIL", None)
            if codejail_config:
                kw['codejail_python'] = self.enable_codejail(codejail_config)

            handler = getattr(module, classname)
            if kw or inspect.isclass(handler):
                # handler could be a function or a class
                handler = handler(**kw)
            watcher.add_handler(handler)
        return watcher
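The per-queue configuration is dict-like; a hedged sketch with a hypothetical handler path ('manager' stands for whatever object this method belongs to):

queue_config = {
    'SERVER': 'http://localhost:18040',
    'AUTH': ('user', 'pass'),
    'HANDLERS': [
        {'HANDLER': 'myhandlers.grade.GradeHandler',  # hypothetical module.Class
         'KWARGS': {'timeout': 30}},
    ],
}
watcher = manager.client_from_config('test-queue', queue_config)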
Example #30
 def __init__(self, args, config):
     self.args = args
     self.config = config
     self.cache_dir = utils.get_cache_dir(config)
     self.model_dir = utils.get_model_dir(config)
     self.category = utils.get_category(config, self.cache_dir if os.path.exists(self.cache_dir) else None)
     self.draw_bbox = utils.visualize.DrawBBox(config, self.category)
     self.anchors = torch.from_numpy(utils.get_anchors(config)).contiguous()
     self.height, self.width = tuple(map(int, config.get('image', 'size').split()))
     self.path, self.step, self.epoch = utils.train.load_model(self.model_dir)
     state_dict = torch.load(self.path, map_location=lambda storage, loc: storage)
     self.dnn = utils.parse_attr(config.get('model', 'dnn'))(model.ConfigChannels(config, state_dict), self.anchors, len(self.category))
     self.dnn.load_state_dict(state_dict)
     self.inference = model.Inference(config, self.dnn, self.anchors)
     self.inference.eval()
     if torch.cuda.is_available():
         self.inference.cuda()
     logging.info(humanize.naturalsize(sum(var.cpu().numpy().nbytes for var in self.inference.state_dict().values())))
     self.create_cap()
     self.create_cap_size()
     self.create_writer()
     self.keys = set(args.keys)
     self.resize = transform.parse_transform(config, config.get('transform', 'resize_test'))
     self.transform_image = transform.get_transform(config, config.get('transform', 'image_test').split())
     self.transform_tensor = transform.get_transform(config, config.get('transform', 'tensor').split())
Example #31
def main():
    if len(sys.argv) < 2 or '-h' in sys.argv:
        print("Usage: update_orgs etc/search.ini [custom_index_names]")
        sys.exit(1)

    parser = ConfigParser()
    parser.read(sys.argv[1])
    config = dict(parser.items('search_engine'))
    config = decode_bool_values(config)
    uo_config = dict(parser.items('update_orgs'))

    if len(sys.argv) > 2:
        config['index_names'] = sys.argv[2]

    logging.config.fileConfig(sys.argv[1])

    logger.info("Starting openprocurement.search.update_orgs v%s", __version__)
    logger.info("Copyright (c) 2015-2018 Volodymyr Flonts <*****@*****.**>")

    # try get exclusive lock to prevent second start
    lock_filename = uo_config.get('pidfile') or 'update_orgs.pid'
    lock_file = open(lock_filename, "w")
    fcntl.lockf(lock_file, fcntl.LOCK_EX + fcntl.LOCK_NB)
    lock_file.write(str(os.getpid()) + "\n")
    lock_file.flush()

    signal.signal(signal.SIGTERM, sigterm_handler)
    # signal.signal(signal.SIGINT, sigterm_handler)

    try:
        chage_process_user_group(config)
    except Exception as e:
        logger.error("Can't change process user: %s", str(e))

    try:
        global engine

        engine = IndexOrgsEngine(config, uo_config)
        source = OrgsSource(config)
        index = OrgsIndex(engine, source, config)
        # manually reset and prevent a second reset on the first process_source
        source.reset()
        index.last_current_index = index.current_index
        if config.get('tender_api_url', None):
            source = TenderSource(config)
            engine.process_source(source)
        if config.get('ocds_dir', None):
            source = OcdsSource(config)
            engine.process_source(source)
        if config.get('plan_api_url', None):
            source = PlanSource(config)
            engine.process_source(source)
        if config.get('auction_api_url', None):
            source = AuctionSource(config)
            engine.process_source(source)
        engine.flush_orgs_map()
    except Exception as e:
        logger.exception("Exception: %s", str(e))
    finally:
        lock_file.close()
        os.remove(lock_filename)
        logger.info("Shutdown")

    return 0
Example #32
 def read_config(self, config):
     self.verbosity = config.get("verbose", 0)
     self.log_config = self.abspath(config.get("log_config"))
     self.log_file = self.abspath(config.get("log_file"))
     if config.get("full_twisted_stacktraces"):
         debug_deferreds()
Example #33
 def generate_files(self, config):
     log_config = config.get("log_config")
     if log_config and not os.path.exists(log_config):
         with open(log_config, "wb") as log_config_file:
             log_config_file.write(
                 DEFAULT_LOG_CONFIG.substitute(log_file=config["log_file"]))
Example #34
LOGGING_CONFIGURATION = {
    'version': 1,
    'formatters': {
        'simple': {
            'format': '[%(levelname)s/%(name)s] %(message)s',
        },
    },
    'handlers': {
        'console': {
            'level': 'DEBUG',
            'class': 'logging.StreamHandler',
            'formatter': 'simple',
        },
    },
    'loggers': {
        'tunneldigger': {
            'handlers': ['console'],
            'level': config.get('log', 'verbosity'),
        }
    }
}
logging.config.dictConfig(LOGGING_CONFIGURATION)

# Logger.
logger = logging.getLogger("tunneldigger.broker")
logger.info("Initializing the tunneldigger broker.")

# Initialize the event loop.
event_loop = eventloop.EventLoop()

# Initialize the hook manager.
hook_manager = hooks.HookManager(
    event_loop=event_loop,
Example #35
host_name = str(sys.argv[1])
host_invMode = str(sys.argv[2])

host_id = 11008
#host_id = str(host_id)

print "\n---------------------------"
print "host_name: %s" % host_name
print "host_invMode: %s" % host_invMode
print "---------------------------\n"

# GET LOCAL CONFIGURATION
config = get_config("zdh.conf")
server = {}
server['url'] = config.get('server', 'url')
server['user'] = config.get('server', 'user')
server['password'] = config.get('server', 'password')
logger.debug("server     = '%s'" % server)

# LOGIN TO ZABBIX
zapi = ZabbixAPI(server['url'])
zapi.login(server['user'], server['password'])
logger.debug("connected to Zabbix API Version %s" % zapi.api_version())

#zapi.host.massupdate(hosts=hlookup,inventory_mode=invm)

#11008

if host_invMode == "auto": host_invMode = int(1)
elif host_invMode == "disabled": host_invMode = int(-1)
Example #36
    def __init__(self, zookeeper, **kwargs):
        """
        :type zookeeper: kazoo.client.KazooClient
        """
        self._zookeeper = zookeeper
        self._settings = kwargs
        try:
            data, stat = self._zookeeper.get(ZOOM_CONFIG)
            config = json.loads(data)

            # create 'logs' directory if it does not exist
            if not os.path.exists("logs"):
                os.makedirs("logs")

            # initialize logging
            logging_config = config.get('logging')
            logging.config.dictConfig(logging_config)

            # get system type
            running_os = self._get_system()

            self._host = socket.gethostname()
            # web_server
            web_server_settings = config.get('web_server')
            self._port = self._get_setting('port',
                                           web_server_settings.get('port'))
            self._is_debug = web_server_settings.get('debug')

            self._application_path = os.getcwd()
            self._client_path = zk_path_join(
                (os.path.normpath(os.getcwd() + os.sep + os.pardir)), 'client')
            self._doc_path = zk_path_join(
                (os.path.normpath(os.getcwd() + os.sep + os.pardir)), "doc")
            self._html_path = zk_path_join(self._client_path, "views")
            self._images_path = zk_path_join(self._client_path, "images")
            self._pid = os.getpid()
            self._environment = self._get_setting(
                'environment', os.environ.get('EnvironmentToUse', 'Staging'))

            # zookeeper
            zookeeper_settings = config.get('zookeeper')
            self._zookeeper_paths = zookeeper_settings
            self._agent_configuration_path = zookeeper_settings.get(
                'agent_configuration_path')
            self._agent_state_path = zookeeper_settings.get('agent_state_path')
            self._task_path = zookeeper_settings.get('task_path')
            self._application_state_path = zookeeper_settings.get(
                'application_state_path')
            self._global_mode_path = zookeeper_settings.get('global_mode_path')
            self._pillar_path = zookeeper_settings.get('pillar_path')
            self._alert_path = zookeeper_settings.get('alert_path')
            self._override_node = zookeeper_settings.get(
                'override_node', '/spot/software/config/override')
            self._temp_directory = zookeeper_settings.get('temp_directory')
            self._zookeeper_host = get_zk_conn_string(self._environment)

            # pagerduty
            pagerduty_settings = config.get('pagerduty')
            self._pagerduty_default_svc_key = pagerduty_settings.get(
                'pagerduty_default_svc_key')
            self._pagerduty_api_token = pagerduty_settings.get(
                'pagerduty_api_token')
            self._pagerduty_subdomain = pagerduty_settings.get(
                'pagerduty_subdomain')
            self._pagerduty_enabled_environments = pagerduty_settings.get(
                'pagerduty_enabled_environments')
            self._pagerduty_alert_footer = pagerduty_settings.get(
                'pagerduty_footer', '')

            # database
            db_settings = config.get('database')
            self._db_type = db_settings.get('db_type')
            if running_os == PlatformType.WINDOWS:
                self._sql_connection = db_settings.get(
                    'sql_connection_windows')
            else:
                self._sql_connection = db_settings.get('sql_connection')

            # authentication
            ad_settings = config.get('active_directory')
            self._ldap_server = ad_settings.get('host')
            self._ldap_port = ad_settings.get('port')

            # environment specific
            env_settings = config.get(self._environment.lower())
            self._read_write_groups = env_settings.get('read_write_groups')
            self._graphite_host = env_settings.get('graphite_host')
            self._graphite_recheck = env_settings.get('graphite_recheck', '5m')

            # chatops
            chatops_settings = env_settings.get('chatops', {})
            self._chatops_url = chatops_settings.get('url')
            self._chatops_group = chatops_settings.get('group')
            self._chatops_commands_to_chat = chatops_settings.get(
                'commands_to_chat')

            # message throttling
            throttle_settings = config.get('message_throttle')
            self._throttle_interval = throttle_settings.get('interval')

            # salt
            self._salt_settings = env_settings.get('saltREST')

        except ValueError as e:
            logging.error('Data at {0} is not valid JSON.'.format(ZOOM_CONFIG))
            raise e
        except NoNodeError as e:
            logging.error('Config node missing: {}'.format(ZOOM_CONFIG))
            raise e
        except Exception as e:
            logging.exception('An unhandled exception occurred.')
            raise e
Example #37
            keyVals = options.substitutions.split(",")
            for keyVal in keyVals:
                parts = keyVal.split(":", 1)
                subsDict[parts[0]] = parts[1]
                if (logger != None):
                    logger.debug("Substitution Key: %s Value: %s" %
                                 (parts[0], parts[1]))

        emailServerDict = {}
        config = ConfigParser.RawConfigParser()
        config.read(options.templateFile)
        emailServer = config.items("EmailServer")
        for entry in emailServer:
            key = entry[0]
            emailServerDict[key] = entry[1]
        recipients = config.get("To", "Recipients")
        recipients = recipients.split(",")
        messageNfoDict = {}
        messageNfo = config.items("Email")
        for entry in messageNfo:
            key = entry[0]
            messageNfoDict[key] = entry[1]

        smtp = smtpClass(emailServerDict['server'], emailServerDict['from'],
                         emailServerDict['pwd'])

        messageNfoDict['attachments'] = messageNfoDict['attachments'] % (
            subsDict)
        fileAttachments = messageNfoDict['attachments'].split(",")
        for filePath in fileAttachments:
            filePath = filePath.replace('"', '')
Example #38
import logging.config
import os
from bamt.config import config
import warnings

log_file_path = config.get('LOG',
                           'log_conf_loc',
                           fallback='log_conf_path is not defined')

if not os.path.isdir(os.path.join(os.path.expanduser("~"), 'BAMT')):
    os.mkdir(os.path.join(os.path.expanduser("~"), 'BAMT'))

try:
    logging.config.fileConfig(log_file_path)
except:
    log_file_path = os.path.join(os.path.dirname(os.path.abspath(__file__)),
                                 'logging.conf')
    logging.config.fileConfig(log_file_path)
    warnings.warn(
        "Reading log path location from config file failed. Default location will be used instead."
    )

logger_builder = logging.getLogger('builder')
logger_network = logging.getLogger('network')
logger_preprocessor = logging.getLogger('preprocessor')
logger_nodes = logging.getLogger('nodes')

logging.captureWarnings(True)
logger_warnings = logging.getLogger('py.warnings')
Example #39
    def _ws_single(request, context):
        """
         Single Row Processing for Websockets
        :param request: iterable sequence of bundled rows
        :return: the same iterable sequence as received
        """
        logging.info('Entering {} TimeStamp: {}'.format(
            function_name,
            datetime.now().strftime("%H:%M:%S.%f")))
        # Start by gathering environment variables

        host = socket.gethostname()
        ip_addr = socket.gethostbyname(host)
        ws_url = config.get(q_function_name, 'ws_url')
        token = config.get(q_function_name, 'token')
        user_name = config.get(q_function_name, 'username')
        ws_route = config.get(q_function_name, 'ws_route')
        bCache = config.get(q_function_name, 'cache')
        logging.debug('Printing Route for WS {}'.format(ws_route))
        logging.debug("Caching is set to {}".format(bCache))
        if (bCache.lower() == "true"):
            logging.info(
                "Caching ****Enabled*** for {}".format(q_function_name))
        else:
            logging.info(
                "Caching ****Disabled**** for {}".format(q_function_name))
            md = (('qlik-cache', 'no-store'), )
            context.send_initial_metadata(md)
        # In future we will use the token for licensing and throttling.
        # Currently we use a combination of host+ipaddr+username for client identification.
        ws_url = ws_url + host + '_' + ip_addr + '_' + user_name + '_'
        logging.debug('Websocket URL : {}'.format(ws_url))
        ws = create_connection(ws_url)
        response_rows = []
        for request_rows in request:
            # Iterate over rows
            # Default code
            for row in request_rows.rows:
                # Retrieve string value of parameter and append to the params variable
                # Length of param is 1 since one column is received, the [0] collects the first value in the list
                param = [d.strData for d in row.duals][0]
                result = ''
                if (len(param) == 0):
                    logging.debug('Parameters are Empty')
                    result = 'Error'
                else:
                    payload = '{"action":"' + ws_route + \
                        '","data":"' + param + '"}'
                    logging.debug('Showing Payload: {}'.format(payload))
                    ws.send(payload)
                    #logging.info('Show  Payload Response: {}'.format(resp.text))
                    resp = json.loads(ws.recv())
                    logging.debug(resp)
                    result = resp['result']
                    logging.debug('Show  Result: {}'.format(result))
                    # Create an iterable of dual with the result
                    duals = iter([SSE.Dual(strData=result)])
                    response_rows.append(SSE.Row(duals=duals))
                # Yield the row data as bundled rows
        yield SSE.BundledRows(rows=response_rows)
        ws.close()
        logging.info('Exiting {} TimeStamp: {}'.format(
            function_name,
            datetime.now().strftime("%H:%M:%S.%f")))
Example #40
def main():
    '''Main(). Commandline parsing and stalker startup.'''

    parser = argparse.ArgumentParser()

    parser.add_argument("-p",
                        "--posttroll_port",
                        dest="posttroll_port",
                        help="Local port where messages are published")
    parser.add_argument("-t",
                        "--topic",
                        dest="topic",
                        help="Topic of the sent messages")
    parser.add_argument("-c",
                        "--configuration_file",
                        help="Name of the config.ini configuration file")
    parser.add_argument("-C",
                        "--config_item",
                        help="Name of the configuration item to use")
    parser.add_argument("-e",
                        "--event_names",
                        help="Name of the events to monitor")
    parser.add_argument("-f",
                        "--filepattern",
                        help="Filepath pattern used to parse "
                        "satellite/orbit/date/etc information")
    parser.add_argument("-i",
                        "--instrument",
                        help="Instrument name in the satellite")

    if len(sys.argv) <= 1:
        parser.print_help()
        sys.exit()
    else:
        args = parser.parse_args()

    # Parse commandline arguments.  If args are given, they override
    # the configuration file.

    args_dict = vars(args)
    args_dict = {k: args_dict[k] for k in args_dict if args_dict[k] != None}

    config = {}

    if args.configuration_file is not None:
        config_fname = args.configuration_file

        if "template" in config_fname:
            print("Template file given as trollstalker logging config,"
                  " aborting!")
            sys.exit()

        cparser = ConfigParser()
        cparser.read(config_fname)
        config = dict(cparser.items(args.config_item, vars=args_dict))

    config.update(args_dict)

    config.update(
        {k: config[k].split(",")
         for k in config if "," in config[k]})

    config.setdefault("posttroll_port", "0")

    try:
        log_config = config["stalker_log_config"]
    except KeyError:
        try:
            loglevel = getattr(logging, config.get("loglevel", "DEBUG"))
            if loglevel == "":
                raise AttributeError
        except AttributeError:
            loglevel = logging.DEBUG

        LOGGER.setLevel(loglevel)
        rootlogger = logging.getLogger("")
        rootlogger.setLevel(loglevel)
        strhndl = logging.StreamHandler()
        strhndl.setLevel(loglevel)
        log_format = "[%(asctime)s %(levelname)-8s %(name)s] %(message)s"
        formatter = logging.Formatter(log_format)

        strhndl.setFormatter(formatter)
        rootlogger.addHandler(strhndl)
    else:
        logging.config.fileConfig(log_config)

    LOGGER.debug("Logger started")

    # Start watching for new files
    notifier = FilePublisher(config)
    notifier.start()

    try:
        while True:
            time.sleep(6000000)
    except KeyboardInterrupt:
        LOGGER.info("Interrupting TrollStalker")
    finally:
        notifier.stop()
Example #41
        # Start gRPC server
        server.start()

        try:
            while True:
                time.sleep(_ONE_DAY_IN_SECONDS)
        except KeyboardInterrupt:
            server.stop(0)


if __name__ == '__main__':
    parser = argparse.ArgumentParser()
    #config.read(os.path.join(os.path.dirname(__file__), 'config', 'qrag.ini'))
    config.read(os.path.join(os.path.dirname(__file__), 'config', 'qrag.ini'))
    port = config.get('base', 'port')
    parser.add_argument('--port', nargs='?', default=port)
    parser.add_argument('--pem_dir', nargs='?')
    parser.add_argument('--definition_file',
                        nargs='?',
                        default='functions.json')
    args = parser.parse_args()
    # need to locate the file when the script is called from outside its location dir.
    def_file = os.path.join(os.path.dirname(os.path.abspath(__file__)),
                            args.definition_file)
    print(def_file)
    logging.info(
        '*** Server Configurations Port: {}, Pem_Dir: {}, def_file {} TimeStamp: {} ***'
        .format(args.port, args.pem_dir, def_file,
                datetime.now().isoformat()))
    calc = ExtensionService(def_file)
Example #42
 def _ws_batch(request, context):
     """
     Mirrors the input and sends back the same data.
     :param request: iterable sequence of bundled rows
     :return: the same iterable sequence as received
     """
     logging.info('Entering {} TimeStamp: {}'.format(
         function_name,
         datetime.now().strftime("%H:%M:%S.%f")))
     host = socket.gethostname()
     ip_addr = socket.gethostbyname(host)
     logging.debug('Calling qrag.ini section "{}"'.format(q_function_name))
     ws_url = config.get(q_function_name, 'ws_url')
     token = config.get(q_function_name, 'token')
     user_name = config.get(q_function_name, 'username')
     batch_size = int(config.get(q_function_name, 'batch_size'))
     logging.debug('Batch Size {}'.format(batch_size))
     ws_route = config.get(q_function_name, 'ws_route')
     logging.info('API Route : {}'.format(ws_route))
     # setup Caching
     bCache = config.get(q_function_name, 'cache')
     logging.debug("Caching is set to {}".format(bCache))
     if (bCache.lower() == "true"):
         logging.info(
             "Caching ****Enabled*** for {}".format(q_function_name))
     else:
         logging.info(
             "Caching ****Disabled**** for {}".format(q_function_name))
         md = (('qlik-cache', 'no-store'), )
         context.send_initial_metadata(md)
     ws_url = ws_url + host + '_' + ip_addr + '_' + user_name + '_'
     logging.debug('Full url for ws: {} '.format(ws_url))
     ws = create_connection(ws_url)
     response_rows = []
     outer_counter = 1
     inner_counter = 1
     request_counter = 1
     for request_rows in request:
         logging.debug('Printing Request Rows - Request Counter {}'.format(
             request_counter))
         request_counter += 1
         temp = MessageToDict(request_rows)
         logging.debug('Temp Message to Dict {}'.format(temp))
         test_rows = temp['rows']
         logging.debug('Test Rows: {}'.format(test_rows))
         request_size = len(test_rows)
         logging.debug(
             'Bundled Row Number of Rows - {}'.format(request_size))
         batches = list(qlist.divide_chunks(test_rows, batch_size))
         for i in batches:
             payload_t = {"action": ws_route}
             logging.debug('Prefix Route Selection {}'.format(payload_t))
             logging.debug(len(batches))
             payload_t["data"] = i
             logging.debug('Size of payload {}'.format(
                 pysize.get_size(payload_t)))
             logging.debug('Showing Payload: {}'.format(payload_t))
             logging.debug('batch number {}'.format(outer_counter))
             ws.send(json.dumps(payload_t))
             logging.debug('message sent WS')
             outer_counter += 1
             payload_t.clear()
             for j in i:
                 #logging.debug("Priniting i {}" .format(i))
                 resp = json.loads(ws.recv())
                 #logging.debug('Response Type : {}' .format(type(resp)))
                 logging.debug(
                     'Counter: {} Payload Size: {}  Payload Response: {}'.
                     format(inner_counter, pysize.get_size(resp), resp))
                 inner_counter += 1
                 result = resp['result']
                 logging.debug('Log Result: {}'.format(result))
                 duals = iter([SSE.Dual(strData=result)])
                 # logging.debug(duals)
                 #logging.debug('Printing Duals {}' .format(duals))
                 # Yield the row data as bundled rows
                 response_rows.append(SSE.Row(duals=duals))
                 logging.debug(
                     'Exiting Inner Loop: Printing j {}'.format(j))
             yield SSE.BundledRows(rows=response_rows)
     ws.close()
     logging.info('Exiting {} TimeStamp: {}'.format(
         function_name,
         datetime.now().strftime("%H:%M:%S.%f")))
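The batching above delegates to qlist.divide_chunks, whose implementation is not shown here. A minimal sketch of such a helper, assuming it only has to split a list into batch_size-sized slices, could look like this:

def divide_chunks(items, chunk_size):
    # Yield successive chunk_size-sized slices of items;
    # the final chunk may be shorter than chunk_size.
    for start in range(0, len(items), chunk_size):
        yield items[start:start + chunk_size]

Wrapping the generator in list(), as the caller above does, materializes all batches up front.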
Example #43
0
def get_dict_from_old_falcon_cfg(config):
    global job_type  # TODO: Stop using global for wait_for_file().
    job_type = "SGE"
    section = 'General'
    if config.has_option(section, 'job_type'):
        job_type = config.get(section, 'job_type')

    pa_concurrent_jobs = 8
    if config.has_option(section, 'pa_concurrent_jobs'):
        pa_concurrent_jobs = config.getint(section, 'pa_concurrent_jobs')

    cns_concurrent_jobs = 8
    if config.has_option(section, 'cns_concurrent_jobs'):
        cns_concurrent_jobs = config.getint(section, 'cns_concurrent_jobs')

    ovlp_concurrent_jobs = 8
    if config.has_option(section, 'ovlp_concurrent_jobs'):
        ovlp_concurrent_jobs = config.getint(section, 'ovlp_concurrent_jobs')

    #appending = False
    #if config.has_option(section, 'appending'):
    #    appending = config.get(section, 'appending')
    #    if appending == "True":
    #        appending = True

    #openending = False
    #if config.has_option(section, 'openending'):
    #    openending = config.get(section, 'openending')
    #    if openending == "True":
    #        openending = True

    input_type = "raw"
    if config.has_option(section, 'input_type'):
        input_type = config.get(section, 'input_type')

    overlap_filtering_setting = """--max_diff 1000 --max_cov 1000 --min_cov 2"""
    if config.has_option(section, 'overlap_filtering_setting'):
        overlap_filtering_setting = config.get(section,
                                               'overlap_filtering_setting')

    pa_HPCdaligner_option = """-v -D24 -t16 -e.70 -l1000 -s100"""
    if config.has_option(section, 'pa_HPCdaligner_option'):
        pa_HPCdaligner_option = config.get(section, 'pa_HPCdaligner_option')

    ovlp_HPCdaligner_option = """ -v -D24 -t32 -h60 -e.96 -l500 -s1000"""
    if config.has_option(section, 'ovlp_HPCdaligner_option'):
        ovlp_HPCdaligner_option = config.get(section,
                                             'ovlp_HPCdaligner_option')

    pa_HPCdaligner_option = update_HPCdaligner_option(pa_HPCdaligner_option)
    ovlp_HPCdaligner_option = update_HPCdaligner_option(
        ovlp_HPCdaligner_option)

    pa_DBsplit_option = """ -x500 -s200"""
    if config.has_option(section, 'pa_DBsplit_option'):
        pa_DBsplit_option = config.get(section, 'pa_DBsplit_option')

    dust = False
    if config.has_option(section, 'dust'):
        dust = config.getboolean(section, 'dust')

    pa_DBdust_option = "-w128 -t2.5 -m20"
    if config.has_option(section, 'pa_DBdust_option'):
        pa_DBdust_option = config.get(section, 'pa_DBdust_option')

    dazcon = False
    if config.has_option(section, 'dazcon'):
        dazcon = config.getboolean(section, 'dazcon')

    pa_dazcon_option = "-j 4 -x -l 500"
    if config.has_option(section, 'pa_dazcon_option'):
        pa_dazcon_option = config.get(section, 'pa_dazcon_option')

    ovlp_DBsplit_option = """ -x500 -s200"""
    if config.has_option(section, 'ovlp_DBsplit_option'):
        ovlp_DBsplit_option = config.get(section, 'ovlp_DBsplit_option')

    falcon_sense_option = """ --output_multi --min_idt 0.70 --min_cov 2 --max_n_read 1800 --n_core 6"""
    if config.has_option(section, 'falcon_sense_option'):
        falcon_sense_option = config.get(section, 'falcon_sense_option')
    if 'local_match_count' in falcon_sense_option or 'output_dformat' in falcon_sense_option:
        raise Exception(
            'Please remove obsolete "--local_match_count_*" or "--output_dformat"'
            + ' from "falcon_sense_option" in your cfg: %s' %
            repr(falcon_sense_option))

    falcon_sense_skip_contained = False
    if config.has_option(section, 'falcon_sense_skip_contained'):
        falcon_sense_skip_contained = config.get(
            section, 'falcon_sense_skip_contained')
        if falcon_sense_skip_contained in ["True", "true", "1"]:
            falcon_sense_skip_contained = True
        else:
            falcon_sense_skip_contained = False

    genome_size = 0
    if config.has_option(section, 'genome_size'):
        genome_size = config.getint(section, 'genome_size')

    seed_coverage = 20
    if config.has_option(section, 'seed_coverage'):
        seed_coverage = config.getfloat(section, 'seed_coverage')

    length_cutoff = -1
    if config.has_option(section, 'length_cutoff'):
        length_cutoff = config.getint(section, 'length_cutoff')
    if length_cutoff < 0:
        if genome_size < 1:
            raise Exception(
                'Must specify either length_cutoff>0 or genome_size>0')

    length_cutoff_pr = config.getint(section, 'length_cutoff_pr')
    input_fofn_fn = config.get(section, 'input_fofn')

    # This one depends on length_cutoff_pr for its default.
    fc_ovlp_to_graph_option = ''
    if config.has_option(section, 'fc_ovlp_to_graph_option'):
        fc_ovlp_to_graph_option = config.get(section,
                                             'fc_ovlp_to_graph_option')
    if '--min_len' not in fc_ovlp_to_graph_option:
        fc_ovlp_to_graph_option += ' --min_len %d' % length_cutoff_pr

    bestn = 12
    if config.has_option(section, 'bestn'):
        bestn = config.getint(section, 'bestn')

    if config.has_option(section, 'target'):
        target = config.get(section, 'target')
        if target not in ["overlapping", "pre-assembly", "assembly"]:
            msg = """ Target has to be "overlapping", "pre-assembly" or "assembly" in this version. You have an unknown target %s in the configuration file.  """ % target
            raise Exception(msg)
    else:
        logger.info(""" No target specified, assuming "assembly" as target """)
        target = "assembly"

    if config.has_option(section, 'stop_all_jobs_on_failure'):
        stop_all_jobs_on_failure = config.getboolean(
            section, 'stop_all_jobs_on_failure')
    else:
        # Good default. Rarely needed, since we already stop early if *all* tasks fail
        # in a given refresh.
        stop_all_jobs_on_failure = False
    if config.has_option(section, 'use_tmpdir'):
        use_tmpdir = config.getboolean(section, 'use_tmpdir')
    else:
        use_tmpdir = False

    TEXT_FILE_BUSY = 'avoid_text_file_busy'
    if config.has_option(section, TEXT_FILE_BUSY):
        bash.BUG_avoid_Text_file_busy = config.getboolean(
            section, TEXT_FILE_BUSY)

    hgap_config = {#"input_fofn_fn" : input_fofn_fn, # deprecated
                   "input_fofn" : input_fofn_fn,
                   "target" : target,
                   "job_type" : job_type,
                   "input_type": input_type,
                   #"openending": openending,
                   "pa_concurrent_jobs" : pa_concurrent_jobs,
                   "ovlp_concurrent_jobs" : ovlp_concurrent_jobs,
                   "cns_concurrent_jobs" : cns_concurrent_jobs,
                   "overlap_filtering_setting": overlap_filtering_setting,
                   "genome_size" : genome_size,
                   "seed_coverage" : seed_coverage,
                   "length_cutoff" : length_cutoff,
                   "length_cutoff_pr" : length_cutoff_pr,
                   "sge_option_da": config.get(section, 'sge_option_da'),
                   "sge_option_la": config.get(section, 'sge_option_la'),
                   "sge_option_pda": config.get(section, 'sge_option_pda'),
                   "sge_option_pla": config.get(section, 'sge_option_pla'),
                   "sge_option_fc": config.get(section, 'sge_option_fc'),
                   "sge_option_cns": config.get(section, 'sge_option_cns'),
                   "pa_HPCdaligner_option": pa_HPCdaligner_option,
                   "ovlp_HPCdaligner_option": ovlp_HPCdaligner_option,
                   "pa_DBsplit_option": pa_DBsplit_option,
                   "dust": dust,
                   "pa_DBdust_option": pa_DBdust_option,
                   "dazcon": dazcon,
                   "pa_dazcon_option": pa_dazcon_option,
                   "ovlp_DBsplit_option": ovlp_DBsplit_option,
                   "fc_ovlp_to_graph_option": fc_ovlp_to_graph_option,
                   "falcon_sense_option": falcon_sense_option,
                   "falcon_sense_skip_contained": falcon_sense_skip_contained,
                   "stop_all_jobs_on_failure": stop_all_jobs_on_failure,
                   "use_tmpdir": use_tmpdir,
                   TEXT_FILE_BUSY: bash.BUG_avoid_Text_file_busy,
                   }
    provided = dict(config.items(section))
    unused = set(provided) - set(k.lower() for k in hgap_config)
    if unused:
        import warnings
        warnings.warn("Unexpected keys in input config: %s" % repr(unused))

    hgap_config["install_prefix"] = sys.prefix

    return hgap_config
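Note that length_cutoff_pr, input_fofn and the six sge_option_* keys are read without a has_option guard, so they are effectively required. A hypothetical minimal [General] section this parser would accept, with purely illustrative values:

[General]
input_fofn = input.fofn
length_cutoff_pr = 1000
genome_size = 4600000
sge_option_da = -pe smp 8
sge_option_la = -pe smp 2
sge_option_pda = -pe smp 8
sge_option_pla = -pe smp 2
sge_option_fc = -pe smp 24
sge_option_cns = -pe smp 8

Everything else falls back to the defaults defined above; unknown keys only trigger a warning.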
Example #44
0
 def _rest_template(request, context):
     """
     Rest using template
     """
     logging.info('Entering {} TimeStamp: {}'.format(
         function_name,
         datetime.now().strftime("%H:%M:%S.%f")))
     url = config.get(q_function_name, 'url')
     logging.debug("Rest Url is set to {}".format(url))
     query_parameter = config.get(q_function_name, 'query_parameter')
     logging.debug("Query parameter is set to {}".format(query_parameter))
     bCache = config.get(q_function_name, 'cache')
     logging.debug("Caching is set to {}".format(bCache))
     if (bCache.lower() == "true"):
         logging.info(
             "Caching ****Enabled*** for {}".format(q_function_name))
     else:
         logging.info(
             "Caching ****Disabled**** for {}".format(q_function_name))
         md = (('qlik-cache', 'no-store'), )
         context.send_initial_metadata(md)
     response_rows = []
     request_counter = 1
     for request_rows in request:
         logging.debug('Printing Request Rows - Request Counter {}'.format(
             request_counter))
         request_counter = request_counter + 1
         for row in request_rows.rows:
             # Retrieve the string value of each parameter; one value arrives per
             # received column, so param[0] and param[1] below hold the two inputs
             param = [d.strData for d in row.duals]
             logging.debug('Param: {} (type {})'.format(param, type(param)))
             if len(param) == 0:
                 logging.info(
                     'Exiting {} TimeStamp: {} due to empty data'.
                     format(function_name,
                            datetime.now().strftime("%H:%M:%S.%f")))
             else:
                 qp0 = param[0]
                 qp1 = param[1]
                 logging.debug('qp0: {}'.format(qp0))
                 logging.debug('qp1: {}'.format(qp1))
                 logging.debug(
                     'Query parameter: {}'.format(query_parameter))
                 parameterized_url = url + qp0 + '?' + query_parameter + '=' + qp1 + '&'
                 logging.debug(
                     'Parameterized URL: {}'.format(parameterized_url))
                 resp = requests.get(parameterized_url)
                 logging.debug('Type: {}'.format(type(resp)))
                 logging.debug('Payload response as JSON: {}'.format(
                     resp.json()))
                 result = resp.json()
                 result = result.get('ratings')
                 # result = result.strip()
                 logging.debug('Show Result: {}'.format(result))
                 # Create an iterable of dual with the result
                 for i in result:
                     retData = str(i)
                     logging.debug('retData: {} (type {})'.format(
                         retData, type(retData)))
                 # note: only the last rating from the loop above ends up in the dual
                 duals = iter([SSE.Dual(strData=retData)])
                 response_rows.append(SSE.Row(duals=duals))
             # Yield the row data as bundled rows
     yield SSE.BundledRows(rows=response_rows)
     logging.info('Exiting {} TimeStamp: {}'.format(
         function_name,
         datetime.now().strftime("%H:%M:%S.%f")))
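One caveat in the snippet above: the URL is assembled by string concatenation, so qp1 is never URL-encoded and the trailing '&' is superfluous. A sketch of the same request using the params argument of requests.get, which handles the encoding (same url, qp0, query_parameter and qp1 as above):

resp = requests.get(url + qp0, params={query_parameter: qp1})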
Example #45
0
            raw_list = None

        loc_raw = args.raw_repo
        loc_output = args.loc_out

        config = ConfigParser.RawConfigParser()
        config.read(
            os.path.join(os.path.dirname(os.path.realpath(sys.argv[0])),
                         'moff_setting.properties'))
        df = pd.read_csv(file_name, sep="\t")
        ## add some safety checks (len > 1)
        ## Flag for the PRIDE pipeline, and for switching the input RT time scale from seconds to minutes
        moff_pride_flag = 0
        if moff.check_ps_input_data(
                df.columns.tolist(),
                ast.literal_eval(config.get('moFF', 'moffpride_format'))) == 1:
            # if it is moFF-PRIDE data, do not check any other requirements
            log.critical('moffPride input detected')
            moff_pride_flag = 1
        else:
            if 'matched' not in df.columns:
                # check whether it is a PS file
                list_name = df.columns.values.tolist()
                # get the list of default PS columns from the properties file
                ps_default_columns = ast.literal_eval(
                    config.get('moFF', 'ps_default_export_v1'))
                # check whether the input is a PS export; if so, map its columns to the moFF names
                if moff.check_ps_input_data(list_name, ps_default_columns) == 1:
                    # map the column names according to the moFF input requirements
                    if args.peptide_summary != 1:
                        data_ms2, list_name = moff.map_ps2moff(
Example #46
0
def main():
    parser = argparse.ArgumentParser(description='FIX Gateway')
    parser.add_argument('--debug',
                        action='store_true',
                        help='Run in debug mode')
    parser.add_argument('--config-file',
                        type=argparse.FileType('r'),
                        help='Alternate configuration file')
    parser.add_argument('--log-config',
                        type=argparse.FileType('w'),
                        help='Alternate logger configuration file')

    args, unknown_args = parser.parse_known_args()

    logging.config.fileConfig(logconfig_file)
    log = logging.getLogger()
    if args.debug:
        log.setLevel(logging.DEBUG)
    log.info("Starting FIX Gateway")

    config = configparser.ConfigParser()
    # To keep configparser from making everything lowercase
    config.optionxform = str
    config.read(config_file)
    try:
        database.init(config)
    except Exception as e:
        log.error("Database failure, Exiting")
        print(e)
        raise  # we don't want to run with a screwed-up database

    log.info("Setting Initial Values")
    try:
        for item in config.items("initial"):
            database.write(item[0], item[1])
    except Exception as e:
        log.error(
            "Problem setting initial values from configuration - {0}".format(
                e))

    # TODO: Add a hook here for post database creation code

    # TODO: Need to do some more thorough error checking here

    # run through the plugin_list dict and find all the plugins that are
    # configured to be loaded and load them.

    for each in config:
        if each[:5] == "conn_":
            if config.getboolean(each, "load"):
                module = config.get(each, "module")
                try:
                    load_plugin(each[5:], module, config)
                except Exception as e:
                    logging.critical("Unable to load module - " + module +
                                     ": " + str(e))

    # TODO add a hook here for pre module run code

    for each in plugins:
        plugins[each].run()

    iteration = 0
    while True:
        try:
            job = plugin.jobQueue.get(timeout=1.0)
            if job == "QUIT":
                break
        except KeyboardInterrupt:
            log.info("Termination from keyboard received")
            break
        except queue.Empty:
            pass
        iteration += 1
        # Every fourth time through the loop, check whether any plugins are still running
        if iteration % 4 == 0:
            # check how many plugins are running and exit if zero
            running_count = 0
            for each in plugins:
                if plugins[each].is_running():
                    running_count += 1
            if running_count == 0:
                log.info("No plugins running, quitting")
                break

    for each in plugins:
        plugins[each].stop()

    log.info("FIX Gateway Exiting Normally")
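The plugin loop above looks for sections whose names start with conn_ and reads their load and module options via config.getboolean and config.get. A hypothetical section it would pick up (section and module names are illustrative only):

[conn_demo]
load = yes
module = plugins.demo

Because optionxform is overridden above, option names keep their case exactly as written in the file.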
Example #47
0
def create_app(main_module=False):
    app = Flask(__name__, static_url_path='/static', static_folder='static')

    babel = Babel(app)

    app.config['BABEL_TRANSLATION_DIRECTORIES'] = os.path.abspath(
        'caipirinha/i18n/locales')
    logging.config.fileConfig('logging_config.ini')

    app.secret_key = 'l3m0n4d1'

    # CORS
    CORS(app, resources={r"/*": {"origins": "*"}})

    # Swagger
    swaggerui_blueprint = get_swaggerui_blueprint(
        '/api/docs',
        '/static/swagger.yaml',
        config={  # Swagger UI config overrides
            'app_name': "Lemonade Caipirinha"
        },
        # oauth_config={  # OAuth config. See https://github.com/swagger-api/swagger-ui#oauth2-configuration .
        #    'clientId': "your-client-id",
        #    'clientSecret': "your-client-secret-if-required",
        #    'realm': "your-realms",
        #    'appName': "your-app-name",
        #    'scopeSeparator': " ",
        #    'additionalQueryStringParams': {'test': "hello"}
        # }
    )

    app.register_blueprint(swaggerui_blueprint)

    # Error handling
    app.register_error_handler(404, handle_exception)

    # API
    api = Api(app)

    mappings = {
        '/dashboards': [DashboardListApi, 'dashboardList'],
        '/public/dashboard/<h>': [PublicDashboardApi, 'publicDashboard'],
        '/dashboards/<int:dashboard_id>':
        [DashboardDetailApi, 'dashboardDetail'],
        '/visualizations/<int:job_id>/<task_id>/<int:vis_id>':
        [VisualizationDetailApi, 'visualizationDetail'],
        '/visualizations/<int:job_id>/<task_id>':
        [VisualizationDetailApi, 'visualizationDetailOld'],
        '/public/visualization/<int:job_id>/<task_id>/<int:vis_id>':
        [PublicVisualizationApi, 'publicVisualization'],
        '/visualizations': [VisualizationListApi, 'visualizationList'],
        '/texts': [TextListApi, 'textList'],
        '/texts/<int:text_id>': [TextDetailApi, 'textDetail'],
    }
    for path, view in list(mappings.items()):
        api.add_resource(view[0], path, endpoint=view[1])

    config_file = os.environ.get('CAIPIRINHA_CONFIG')

    os.chdir(os.environ.get('CAIPIRINHA_HOME', '.'))
    logger = logging.getLogger(__name__)

    migrate = Migrate(app, db)

    @babel.localeselector
    def get_locale():
        user = getattr(flask_g, 'user', None)
        if user is not None and user.locale:
            return user.locale
        else:
            return request.args.get(
                'lang', request.accept_languages.best_match(['en', 'pt',
                                                             'es']))

    sqlalchemy_utils.i18n.get_locale = get_locale

    if config_file:
        with open(config_file) as f:
            config = yaml.load(f, Loader=yaml.FullLoader)['caipirinha']

        app.config["RESTFUL_JSON"] = {"cls": app.json_encoder}

        server_config = config.get('servers', {})
        app.config['SQLALCHEMY_DATABASE_URI'] = server_config.get(
            'database_url')
        app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False

        engine_config = config.get('config', {})
        if engine_config:
            final_config = {'pool_pre_ping': True}
            if 'mysql://' in app.config['SQLALCHEMY_DATABASE_URI']:
                if 'SQLALCHEMY_POOL_SIZE' in engine_config:
                    final_config['pool_size'] = engine_config[
                        'SQLALCHEMY_POOL_SIZE']
                if 'SQLALCHEMY_POOL_RECYCLE' in engine_config:
                    final_config['pool_recycle'] = engine_config[
                        'SQLALCHEMY_POOL_RECYCLE']
            app.config['SQLALCHEMY_ENGINE_OPTIONS'] = final_config

        app.config['CAIPIRINHA_CONFIG'] = config

        db.init_app(app)

        port = int(config.get('port', 5000))
        logger.debug('Running in %s mode', config.get('environment'))

        if main_module:
            if config.get('environment', 'dev') == 'dev':
                app.run(debug=True, port=port)
            else:
                eventlet.wsgi.server(eventlet.listen(('', port)), app)
        else:
            return app
    else:
        logger.error('Please, set CAIPIRINHA_CONFIG environment variable')
        exit(1)
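The keys consumed above imply a CAIPIRINHA_CONFIG file shaped roughly like the following YAML (a minimal sketch; all values are placeholders):

caipirinha:
    environment: dev
    port: 5000
    servers:
        database_url: mysql://user:secret@localhost/caipirinha
    config:
        SQLALCHEMY_POOL_SIZE: 10
        SQLALCHEMY_POOL_RECYCLE: 240

The pool options are only applied when the database URL is MySQL, matching the 'mysql://' check above.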
Example #48
0
import os
import logging.config
from holygrail import optHisNavHihestWinLowest
from configparser import ConfigParser
from holygrail.dealutils import *
from holygrail.btinfo import Btinfo

logging.config.fileConfig(
    '/Users/lamter/workspace/SlaveO/holygrail/tmp/logging.ini')
configIni = '/Users/lamter/workspace/SlaveO/holygrail/tmp/oscillationDonchian_opt.ini'

config = ConfigParser()
config.read(configIni)

path = config.get('优化', 'path')
db = getDB(config)

group, className = '测试ATR通道震荡', 'OscillationAtrChannelStrategy'

periodList = ['{}BM'.format(i) for i in range(1, 13)]
histNavRangeList = list(range(1, 101))

# histNavRangeList = list(range(10, 25))

btinfoCol = db[config.get('回测相关数MongoDB据库', 'btinfo')]

d = btinfoCol.find_one({'group': group, 'className': className}, {'_id': 0})
btInfo = Btinfo(**d)
usList = btInfo.underlyingSymbols[:]
usList = ['rb']
Example #49
0
import logging, time, sys, logging.config, autologging, os
import ConfigParser
# All the logging bootstrapping is done here
env = os.getenv("env", "sandbox")
config_file = "/etc/kafka-backpressure-monitor/settings_{}.ini".format(env)
config = ConfigParser.ConfigParser()
config.read(config_file)
log_level_key = 'consumer_log_level'
consumer_log_level = config.get('log', log_level_key)
consumer_log_level = os.getenv(log_level_key, consumer_log_level).upper()
# or we can just use getattr() instead as commented below
if consumer_log_level == 'CRITICAL':
    log_level = logging.CRITICAL
elif consumer_log_level == 'ERROR':
    log_level = logging.ERROR
elif consumer_log_level == 'WARNING':
    log_level = logging.WARNING
elif consumer_log_level == 'INFO':
    log_level = logging.INFO
else:
    log_level = logging.DEBUG

out_type = config.get('log', 'out_type')
log_location = config.get('log', 'location')
log_file = os.path.join(log_location, "kafka_backpressure_monitor.log")

# Handles all the loggers to use across the project
gen_logger = logging.getLogger("kafka.backpressure")
#log_level = getattr(gen_logger, consumer_log_level)
gen_logger.setLevel(log_level)
formatter = logging.Formatter('%(asctime)s:%(name)s:%(levelname)s:process %(process)d:process name %(processName)s:%(funcName)s():line %(lineno)d:%(message)s')
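As the commented-out line above hints, the whole if/elif ladder can be collapsed into a getattr lookup, provided it is done on the logging module rather than on a logger instance; a one-line sketch using the same consumer_log_level string:

log_level = getattr(logging, consumer_log_level, logging.DEBUG)

The DEBUG fallback mirrors the else branch above, and the names in settings_{env}.ini must match the logging module's level attribute names.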
Example #50
0
    def _transition_queued_actions_to_pending(self):
        _logger.info('transitioning queued actions to pending')
        action_service = self._action_service
        engine_service = self._engine_service
        datastore_service = self._datastore_service
        assert isinstance(action_service, ActionService)
        assert isinstance(engine_service, EngineService)
        assert isinstance(datastore_service, DatastoreService)
        queued_actions = action_service.find_actions(
            states=[ActionState.QUEUED])
        for action in queued_actions:
            try:
                datastore = datastore_service.get_datastore(
                    action.data.datastore_id, raise_when_missing=False)
                if not datastore or datastore.data.state != DatastoreState.ACTIVE:
                    continue

                states = [
                    ActionState.PENDING, ActionState.RUNNING,
                    ActionState.FINISHING
                ]
                action_count = self._action_service.find_action_count(
                    datastore.id, states)
                if action_count >= datastore.data.concurrency:
                    _logger.info(
                        'datastore (id=%s) has reached max concurrency' %
                        datastore.id)
                    continue

                # conditionally updating queued actions as pending allows multiple concurrent engine workers if needed
                action_service.update_action_state(
                    action=action,
                    state=ActionState.PENDING,
                    error_message=action.data.error_message,
                    conditional=lambda a: a.data.state == ActionState.QUEUED)

                engine = engine_service.get_engine_by_name(
                    action.data.engine_name)

                # our best granularity of a user_id to identify who is running this workflow's action.
                datastore_user_id = datastore.data.user_id if hasattr(
                    datastore.data, 'user_id') else 'anonymous'
                if self.dart_config['dart'].get('use_local_engines'):
                    config = self.dart_config['engines'][engine.data.name]
                    engine_instance = locate(
                        config['path'])(**config.get('options', {}))
                    self._launch_in_memory_engine(engine, engine_instance,
                                                  action, datastore_user_id)
                    # empty string allows differentiation from null, yet is still falsey
                    action_service.update_action_batch_job_id(action, '')

                elif engine.data.ecs_task_definition_arn:
                    batch_job_id = self._try_run_task(action,
                                                      datastore_user_id)
                    if batch_job_id:
                        action_service.update_action_batch_job_id(
                            action, batch_job_id)
                    else:
                        # no batch job-id means there isn't enough capacity at the moment, so try again later
                        action_service.update_action_state(
                            action, ActionState.QUEUED,
                            action.data.error_message)

                else:
                    msg = 'engine %s has no ecs_task_definition and local engines are not allowed'
                    raise Exception(msg % engine.data.name)

            except DartConditionalUpdateFailedException:
                # another engine worker picked it up
                continue

            except Exception as e:
                error_message = e.message + '\n\n\n' + traceback.format_exc()
                _logger.error(
                    'error transitioning action (id=%s) to PENDING: %s' %
                    (action.id, error_message))

            finally:
                db.session.rollback()
Example #51
0
        data_dir.mkdir(parents=True, exist_ok=True)

    return config_dict


config = Config(env_prefix="A3M", attrs=CONFIG_MAPPING)
config.read_defaults(StringIO(CONFIG_DEFAULTS))
config.read_files(["/etc/a3m/a3m.cfg"])
config.read_dict(_get_data_dir_defaults(config))


# Django

DATABASES = {
    "default": {
        "ENGINE": config.get("db_engine"),
        "NAME": config.get("db_name"),
        "USER": config.get("db_user"),
        "PASSWORD": config.get("db_password"),
        "HOST": config.get("db_host"),
        "PORT": config.get("db_port"),
        "CONN_MAX_AGE": 3600,
        "OPTIONS": {"timeout": 5},
    }
}

MIDDLEWARE_CLASSES = ()

TEMPLATES = [{"BACKEND": "django.template.backends.django.DjangoTemplates"}]

INSTALLED_APPS = ("a3m.main", "a3m.fpr")
Example #52
0
import logging.config
import os

from config42 import ConfigManager

from instabot_py.default_config import DEFAULT_CONFIG

env_config = ConfigManager(prefix="INSTABOT")
logging.basicConfig(
    level=logging.DEBUG if env_config.get("debug") else logging.INFO)
LOGGER = logging.getLogger(__name__)
config = ConfigManager()
config.set_many(DEFAULT_CONFIG)

config.set_many(env_config.as_dict())
config_file = config.get("config.file")
config_etcd = config.get("config.etcd")

if config_file:
    if config_file.startswith("/"):
        config_path = config_file
    else:
        cwd = os.getcwd()
        config_path = cwd + "/" + config_file
    config.set_many(
        ConfigManager(path=config_path.replace('//', '/')).as_dict())
    LOGGER.info("Setting configuration from {} : OK".format(config_file))

if config_etcd:
    if not config_etcd.get("keyspace"):
        raise Exception("etcd Keyspace is mandatory")
Example #53
0
def getFFmpegWait():
    if config.has_option('Server', 'ffmpeg_wait'):
        return max(int(float(config.get('Server', 'ffmpeg_wait'))), 1)
    else:
        return 0
Example #54
0
    """
    log = logging.getLogger(__name__)
    config = ConfigParser.RawConfigParser()
    try:
        log_path = config.get('LOG_Path', 'log_path')
    except ConfigParser.ParsingError, err:
        print 'Could not parse:', err
    mkdir_log(log_path)
    fullPath = findLiotaConfigFullPath().get_liota_fullpath()
    if fullPath != '':
        try:
            if config.read(fullPath) != []:
                # now use json file for logging settings
                try:
                    log_cfg = config.get('LOG_CFG', 'json_path')
                except ConfigParser.ParsingError, err:
                    print 'Could not parse:', err
            else:
                raise IOError('Cannot open configuration file ' + fullPath)
        except IOError, err:
            print 'Could not open:', err
        if os.path.exists(log_cfg):
            with open(log_cfg, 'rt') as f:
                config = json.load(f)
            logging.config.dictConfig(config)
            log.info('created logger with ' + log_cfg)
        else:
            # missing logging.json file
            logging.basicConfig(level=default_level)
            log.warn(
Example #55
0
def get_config(parse_args=True, cfg_path=None, options=None):
    if parse_args:
        options, _ = get_parsed_args()

    # General config
    agentConfig = {
        'check_freq': DEFAULT_CHECK_FREQUENCY,
        'dogstatsd_port': 8125,
        'dogstatsd_target': 'http://localhost:17123',
        'graphite_listen_port': None,
        'hostname': None,
        'listen_port': None,
        'tags': None,
        'use_ec2_instance_id': False,  # DEPRECATED
        'version': get_version(),
        'watchdog': True,
        'additional_checksd': '/etc/dd-agent/checks.d/',
        'bind_host': get_default_bind_host(),
        'statsd_metric_namespace': None,
        'utf8_decoding': False
    }

    if Platform.is_mac():
        agentConfig['additional_checksd'] = '/opt/datadog-agent/etc/checks.d'

    # Config handling
    try:
        # Find the right config file
        path = os.path.realpath(__file__)
        path = os.path.dirname(path)

        config_path = get_config_path(cfg_path, os_name=get_os())
        config = ConfigParser.ConfigParser()
        config.readfp(skip_leading_wsp(open(config_path)))

        # bulk import
        for option in config.options('Main'):
            agentConfig[option] = config.get('Main', option)

        # Store developer mode setting in the agentConfig
        if config.has_option('Main', 'developer_mode'):
            agentConfig['developer_mode'] = _is_affirmative(
                config.get('Main', 'developer_mode'))

        # Allow an override with the --profile option
        if options is not None and options.profile:
            agentConfig['developer_mode'] = True

        #
        # Core config
        #

        # FIXME unnecessarily complex
        agentConfig['use_forwarder'] = False
        if options is not None and options.use_forwarder:
            listen_port = 17123
            if config.has_option('Main', 'listen_port'):
                listen_port = int(config.get('Main', 'listen_port'))
            agentConfig['dd_url'] = "http://" + agentConfig[
                'bind_host'] + ":" + str(listen_port)
            agentConfig['use_forwarder'] = True
        elif options is not None and not options.disable_dd and options.dd_url:
            agentConfig['dd_url'] = options.dd_url
        else:
            agentConfig['dd_url'] = config.get('Main', 'dd_url')
        if agentConfig['dd_url'].endswith('/'):
            agentConfig['dd_url'] = agentConfig['dd_url'][:-1]

        # Extra checks.d path
        # the linux directory is set by default
        if config.has_option('Main', 'additional_checksd'):
            agentConfig['additional_checksd'] = config.get(
                'Main', 'additional_checksd')
        elif get_os() == 'windows':
            # default windows location
            common_path = _windows_commondata_path()
            agentConfig['additional_checksd'] = os.path.join(
                common_path, 'Datadog', 'checks.d')

        if config.has_option('Main', 'use_dogstatsd'):
            agentConfig['use_dogstatsd'] = config.get(
                'Main', 'use_dogstatsd').lower() in ("yes", "true")
        else:
            agentConfig['use_dogstatsd'] = True

        # Concerns only Windows
        if config.has_option('Main', 'use_web_info_page'):
            agentConfig['use_web_info_page'] = config.get(
                'Main', 'use_web_info_page').lower() in ("yes", "true")
        else:
            agentConfig['use_web_info_page'] = True

        # Which API key to use
        agentConfig['api_key'] = config.get('Main', 'api_key')

        # local traffic only? Default to no
        agentConfig['non_local_traffic'] = False
        if config.has_option('Main', 'non_local_traffic'):
            agentConfig['non_local_traffic'] = config.get(
                'Main', 'non_local_traffic').lower() in ("yes", "true")

        # DEPRECATED
        if config.has_option('Main', 'use_ec2_instance_id'):
            use_ec2_instance_id = config.get('Main', 'use_ec2_instance_id')
            # translate yes into True, the rest into False
            agentConfig['use_ec2_instance_id'] = (
                use_ec2_instance_id.lower() == 'yes')

        if config.has_option('Main', 'check_freq'):
            try:
                agentConfig['check_freq'] = int(
                    config.get('Main', 'check_freq'))
            except Exception:
                pass

        # Custom histogram aggregate/percentile metrics
        if config.has_option('Main', 'histogram_aggregates'):
            agentConfig['histogram_aggregates'] = get_histogram_aggregates(
                config.get('Main', 'histogram_aggregates'))

        if config.has_option('Main', 'histogram_percentiles'):
            agentConfig['histogram_percentiles'] = get_histogram_percentiles(
                config.get('Main', 'histogram_percentiles'))

        # Disable Watchdog (optionally)
        if config.has_option('Main', 'watchdog'):
            if config.get('Main', 'watchdog').lower() in ('no', 'false'):
                agentConfig['watchdog'] = False

        # Optional graphite listener
        if config.has_option('Main', 'graphite_listen_port'):
            agentConfig['graphite_listen_port'] = \
                int(config.get('Main', 'graphite_listen_port'))
        else:
            agentConfig['graphite_listen_port'] = None

        # Dogstatsd config
        dogstatsd_defaults = {
            'dogstatsd_port': 8125,
            'dogstatsd_target':
            'http://' + agentConfig['bind_host'] + ':17123',
        }
        for key, value in dogstatsd_defaults.iteritems():
            if config.has_option('Main', key):
                agentConfig[key] = config.get('Main', key)
            else:
                agentConfig[key] = value

        # Create app:xxx tags based on monitored apps
        agentConfig['create_dd_check_tags'] = config.has_option('Main', 'create_dd_check_tags') and \
            _is_affirmative(config.get('Main', 'create_dd_check_tags'))

        # Forwarding to external statsd server
        if config.has_option('Main', 'statsd_forward_host'):
            agentConfig['statsd_forward_host'] = config.get(
                'Main', 'statsd_forward_host')
            if config.has_option('Main', 'statsd_forward_port'):
                agentConfig['statsd_forward_port'] = int(
                    config.get('Main', 'statsd_forward_port'))

        # optionally send dogstatsd data directly to the agent.
        if config.has_option('Main', 'dogstatsd_use_ddurl'):
            if _is_affirmative(config.get('Main', 'dogstatsd_use_ddurl')):
                agentConfig['dogstatsd_target'] = agentConfig['dd_url']

        # Optional config
        # FIXME not the prettiest code ever...
        if config.has_option('Main', 'use_mount'):
            agentConfig['use_mount'] = _is_affirmative(
                config.get('Main', 'use_mount'))

        if options is not None and options.autorestart:
            agentConfig['autorestart'] = True
        elif config.has_option('Main', 'autorestart'):
            agentConfig['autorestart'] = _is_affirmative(
                config.get('Main', 'autorestart'))

        if config.has_option('Main', 'check_timings'):
            agentConfig['check_timings'] = _is_affirmative(
                config.get('Main', 'check_timings'))

        if config.has_option('Main', 'exclude_process_args'):
            agentConfig['exclude_process_args'] = _is_affirmative(
                config.get('Main', 'exclude_process_args'))

        try:
            filter_device_re = config.get('Main', 'device_blacklist_re')
            agentConfig['device_blacklist_re'] = re.compile(filter_device_re)
        except ConfigParser.NoOptionError:
            pass

        if config.has_option('datadog', 'ddforwarder_log'):
            agentConfig['has_datadog'] = True

        # Dogstream config
        if config.has_option("Main", "dogstream_log"):
            # Older version, single log support
            log_path = config.get("Main", "dogstream_log")
            if config.has_option("Main", "dogstream_line_parser"):
                agentConfig["dogstreams"] = ':'.join(
                    [log_path,
                     config.get("Main", "dogstream_line_parser")])
            else:
                agentConfig["dogstreams"] = log_path

        elif config.has_option("Main", "dogstreams"):
            agentConfig["dogstreams"] = config.get("Main", "dogstreams")

        if config.has_option("Main", "nagios_perf_cfg"):
            agentConfig["nagios_perf_cfg"] = config.get(
                "Main", "nagios_perf_cfg")

        if config.has_option("Main", "use_curl_http_client"):
            agentConfig["use_curl_http_client"] = _is_affirmative(
                config.get("Main", "use_curl_http_client"))
        else:
            # Default to False as there are some issues with the curl client and ELB
            agentConfig["use_curl_http_client"] = False

        if config.has_section('WMI'):
            agentConfig['WMI'] = {}
            for key, value in config.items('WMI'):
                agentConfig['WMI'][key] = value

        if (config.has_option("Main", "limit_memory_consumption") and
                config.get("Main", "limit_memory_consumption") is not None):
            agentConfig["limit_memory_consumption"] = int(
                config.get("Main", "limit_memory_consumption"))
        else:
            agentConfig["limit_memory_consumption"] = None

        if config.has_option("Main", "skip_ssl_validation"):
            agentConfig["skip_ssl_validation"] = _is_affirmative(
                config.get("Main", "skip_ssl_validation"))

        agentConfig["collect_instance_metadata"] = True
        if config.has_option("Main", "collect_instance_metadata"):
            agentConfig["collect_instance_metadata"] = _is_affirmative(
                config.get("Main", "collect_instance_metadata"))

        agentConfig["proxy_forbid_method_switch"] = False
        if config.has_option("Main", "proxy_forbid_method_switch"):
            agentConfig["proxy_forbid_method_switch"] = _is_affirmative(
                config.get("Main", "proxy_forbid_method_switch"))

        agentConfig["collect_ec2_tags"] = False
        if config.has_option("Main", "collect_ec2_tags"):
            agentConfig["collect_ec2_tags"] = _is_affirmative(
                config.get("Main", "collect_ec2_tags"))

        agentConfig["utf8_decoding"] = False
        if config.has_option("Main", "utf8_decoding"):
            agentConfig["utf8_decoding"] = _is_affirmative(
                config.get("Main", "utf8_decoding"))

        agentConfig["gce_updated_hostname"] = False
        if config.has_option("Main", "gce_updated_hostname"):
            agentConfig["gce_updated_hostname"] = _is_affirmative(
                config.get("Main", "gce_updated_hostname"))

    except ConfigParser.NoSectionError, e:
        sys.stderr.write('Config file not found or incorrectly formatted.\n')
        sys.exit(2)
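Of the options handled above, only api_key is read unconditionally, and dd_url is required whenever neither the forwarder nor a dd_url command-line override is in play, so a near-minimal [Main] section for this parser could be (placeholder values):

[Main]
dd_url: https://example.datadoghq.com
api_key: 0123456789abcdef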
Example #56
0
def get_server(name, default=None):
    if config.has_option('Server', name):
        return config.get('Server', name)
    else:
        return default
Example #57
0
def config_get_safe(config, section, key, default=None):
    try:
        return config.get(section, key)
    except configparser.NoOptionError:
        return default
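Usage mirrors config.get, but a missing key yields the default instead of an exception; for example, with a hypothetical section and option:

timeout = config_get_safe(config, 'Server', 'timeout', default='30')

Note that only NoOptionError is swallowed: if the section itself is missing, configparser.NoSectionError still propagates.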
Example #58
0
    logger (Logger): the object for logging

"""

import ConfigParser
import datetime
import logging.config
import os

from bs4 import BeautifulSoup
import requests

config = ConfigParser.RawConfigParser()
config.read('/etc/calfresh/calfresh.conf')

temp_dir = config.get('filepaths', 'temp')
data_dir = config.get('filepaths', 'data')

logging.config.fileConfig(config.get('filepaths', 'config'))
logger = logging.getLogger('web_crawler')


class WebCrawler(object):
    """The WebCrawler gets today's and yesterday's html files for a given url
        and uses the PageParser to identify new and updated files

    Args:
        table (str): the table we're currently working on
        url (str): the url for the table's data

    Returns:
Example #59
0
def get_config(parse_args=True, cfg_path=None, options=None):
    if parse_args:
        options, _ = get_parsed_args()

    # General config
    agentConfig = {
        'check_freq': DEFAULT_CHECK_FREQUENCY,
        'dogstatsd_interval': DEFAULT_STATSD_FREQUENCY,
        'dogstatsd_normalize': 'yes',
        'dogstatsd_port': 8125,
        'dogstatsd_target': 'http://localhost:17123',
        'graphite_listen_port': None,
        'hostname': None,
        'listen_port': None,
        'tags': None,
        'use_ec2_instance_id': False,  # DEPRECATED
        'version': get_version(),
        'watchdog': True,
        'additional_checksd': '/etc/dd-agent/checks.d/',
    }

    dogstatsd_interval = DEFAULT_STATSD_FREQUENCY

    # Config handling
    try:
        # Find the right config file
        path = os.path.realpath(__file__)
        path = os.path.dirname(path)

        config_path = get_config_path(cfg_path, os_name=get_os())
        config = ConfigParser.ConfigParser()
        config.readfp(skip_leading_wsp(open(config_path)))

        # bulk import
        for option in config.options('Main'):
            agentConfig[option] = config.get('Main', option)

        #
        # Core config
        #

        # FIXME unnecessarily complex

        if config.has_option('Main', 'use_dd'):
            agentConfig['use_dd'] = config.get('Main',
                                               'use_dd').lower() in ("yes",
                                                                     "true")
        else:
            agentConfig['use_dd'] = True

        agentConfig['use_forwarder'] = False
        if options is not None and options.use_forwarder:
            listen_port = 17123
            if config.has_option('Main', 'listen_port'):
                listen_port = int(config.get('Main', 'listen_port'))
            agentConfig['dd_url'] = "http://localhost:" + str(listen_port)
            agentConfig['use_forwarder'] = True
        elif options is not None and not options.disable_dd and options.dd_url:
            agentConfig['dd_url'] = options.dd_url
        else:
            agentConfig['dd_url'] = config.get('Main', 'dd_url')
        if agentConfig['dd_url'].endswith('/'):
            agentConfig['dd_url'] = agentConfig['dd_url'][:-1]

        # Extra checks.d path
        # the linux directory is set by default
        if config.has_option('Main', 'additional_checksd'):
            agentConfig['additional_checksd'] = config.get(
                'Main', 'additional_checksd')
        elif get_os() == 'windows':
            # default windows location
            common_path = _windows_commondata_path()
            agentConfig['additional_checksd'] = os.path.join(
                common_path, 'Datadog', 'checks.d')

        # Whether also to send to Pup
        if config.has_option('Main', 'use_pup'):
            agentConfig['use_pup'] = config.get('Main',
                                                'use_pup').lower() in ("yes",
                                                                       "true")
        else:
            agentConfig['use_pup'] = True

        # Concerns only Windows
        if config.has_option('Main', 'use_web_info_page'):
            agentConfig['use_web_info_page'] = config.get(
                'Main', 'use_web_info_page').lower() in ("yes", "true")
        else:
            agentConfig['use_web_info_page'] = True

        if agentConfig['use_pup'] or agentConfig['use_web_info_page']:
            if config.has_option('Main', 'pup_url'):
                agentConfig['pup_url'] = config.get('Main', 'pup_url')
            else:
                agentConfig['pup_url'] = 'http://localhost:17125'

            if config.has_option('Main', 'pup_port'):
                agentConfig['pup_port'] = int(config.get('Main', 'pup_port'))

        # Increases the frequency of statsd metrics when only sending to Pup
        if not agentConfig['use_dd'] and agentConfig['use_pup']:
            dogstatsd_interval = PUP_STATSD_FREQUENCY

        if not agentConfig['use_dd'] and not agentConfig['use_pup']:
            sys.stderr.write(
                "Please specify at least one endpoint to send metrics to. This can be done in datadog.conf."
            )
            exit(2)

        # Which API key to use
        agentConfig['api_key'] = config.get('Main', 'api_key')

        # local traffic only? Default to no
        agentConfig['non_local_traffic'] = False
        if config.has_option('Main', 'non_local_traffic'):
            agentConfig['non_local_traffic'] = config.get(
                'Main', 'non_local_traffic').lower() in ("yes", "true")

        # DEPRECATED
        if config.has_option('Main', 'use_ec2_instance_id'):
            use_ec2_instance_id = config.get('Main', 'use_ec2_instance_id')
            # translate yes into True, the rest into False
            agentConfig['use_ec2_instance_id'] = (
                use_ec2_instance_id.lower() == 'yes')

        if config.has_option('Main', 'check_freq'):
            try:
                agentConfig['check_freq'] = int(
                    config.get('Main', 'check_freq'))
            except Exception:
                pass

        # Disable Watchdog (optionally)
        if config.has_option('Main', 'watchdog'):
            if config.get('Main', 'watchdog').lower() in ('no', 'false'):
                agentConfig['watchdog'] = False

        # Optional graphite listener
        if config.has_option('Main', 'graphite_listen_port'):
            agentConfig['graphite_listen_port'] = \
                int(config.get('Main', 'graphite_listen_port'))
        else:
            agentConfig['graphite_listen_port'] = None

        # Dogstatsd config
        dogstatsd_defaults = {
            'dogstatsd_port': 8125,
            'dogstatsd_target': 'http://localhost:17123',
            'dogstatsd_interval': dogstatsd_interval,
            'dogstatsd_normalize': 'yes',
        }
        for key, value in dogstatsd_defaults.iteritems():
            if config.has_option('Main', key):
                agentConfig[key] = config.get('Main', key)
            else:
                agentConfig[key] = value

        # Forwarding to external statsd server
        if config.has_option('Main', 'statsd_forward_host'):
            agentConfig['statsd_forward_host'] = config.get(
                'Main', 'statsd_forward_host')
            if config.has_option('Main', 'statsd_forward_port'):
                agentConfig['statsd_forward_port'] = int(
                    config.get('Main', 'statsd_forward_port'))

        # normalize 'yes'/'no' to boolean
        dogstatsd_defaults['dogstatsd_normalize'] = _is_affirmative(
            dogstatsd_defaults['dogstatsd_normalize'])

        # optionally send dogstatsd data directly to the agent.
        if config.has_option('Main', 'dogstatsd_use_ddurl'):
            use_ddurl = _is_affirmative(
                config.get('Main', 'dogstatsd_use_ddurl'))
            if use_ddurl:
                agentConfig['dogstatsd_target'] = agentConfig['dd_url']

        # Optional config
        # FIXME not the prettiest code ever...
        if config.has_option('Main', 'use_mount'):
            agentConfig['use_mount'] = _is_affirmative(
                config.get('Main', 'use_mount'))

        if config.has_option('Main', 'autorestart'):
            agentConfig['autorestart'] = _is_affirmative(
                config.get('Main', 'autorestart'))

        try:
            filter_device_re = config.get('Main', 'device_blacklist_re')
            agentConfig['device_blacklist_re'] = re.compile(filter_device_re)
        except ConfigParser.NoOptionError:
            pass

        if config.has_option('datadog', 'ddforwarder_log'):
            agentConfig['has_datadog'] = True

        # Dogstream config
        if config.has_option("Main", "dogstream_log"):
            # Older version, single log support
            log_path = config.get("Main", "dogstream_log")
            if config.has_option("Main", "dogstream_line_parser"):
                agentConfig["dogstreams"] = ':'.join(
                    [log_path,
                     config.get("Main", "dogstream_line_parser")])
            else:
                agentConfig["dogstreams"] = log_path

        elif config.has_option("Main", "dogstreams"):
            agentConfig["dogstreams"] = config.get("Main", "dogstreams")

        if config.has_option("Main", "nagios_perf_cfg"):
            agentConfig["nagios_perf_cfg"] = config.get(
                "Main", "nagios_perf_cfg")

        if config.has_section('WMI'):
            agentConfig['WMI'] = {}
            for key, value in config.items('WMI'):
                agentConfig['WMI'][key] = value

        if config.has_option("Main", "limit_memory_consumption") and \
            config.get("Main", "limit_memory_consumption") is not None:
            agentConfig["limit_memory_consumption"] = int(
                config.get("Main", "limit_memory_consumption"))
        else:
            agentConfig["limit_memory_consumption"] = None

        if config.has_option("Main", "skip_ssl_validation"):
            agentConfig["skip_ssl_validation"] = _is_affirmative(
                config.get("Main", "skip_ssl_validation"))

    except ConfigParser.NoSectionError, e:
        sys.stderr.write('Config file not found or incorrectly formatted.\n')
        sys.exit(2)
Example #60
0
def get_logging_config(cfg_path=None):
    system_os = get_os()
    logging_config = {
        'log_level': None,
        'log_to_event_viewer': False,
        'log_to_syslog': False,
        'syslog_host': None,
        'syslog_port': None,
    }
    if system_os == 'windows':
        logging_config['windows_collector_log_file'] = os.path.join(
            _windows_commondata_path(), 'Datadog', 'logs', 'collector.log')
        logging_config['windows_forwarder_log_file'] = os.path.join(
            _windows_commondata_path(), 'Datadog', 'logs', 'forwarder.log')
        logging_config['windows_dogstatsd_log_file'] = os.path.join(
            _windows_commondata_path(), 'Datadog', 'logs', 'dogstatsd.log')
        logging_config['jmxfetch_log_file'] = os.path.join(
            _windows_commondata_path(), 'Datadog', 'logs', 'jmxfetch.log')
    else:
        logging_config['collector_log_file'] = '/var/log/datadog/collector.log'
        logging_config['forwarder_log_file'] = '/var/log/datadog/forwarder.log'
        logging_config['dogstatsd_log_file'] = '/var/log/datadog/dogstatsd.log'
        logging_config['jmxfetch_log_file'] = '/var/log/datadog/jmxfetch.log'
        logging_config['log_to_syslog'] = True

    config_path = get_config_path(cfg_path, os_name=system_os)
    config = ConfigParser.ConfigParser()
    config.readfp(skip_leading_wsp(open(config_path)))

    if config.has_section('handlers') or config.has_section(
            'loggers') or config.has_section('formatters'):
        if system_os == 'windows':
            config_example_file = "https://github.com/DataDog/dd-agent/blob/master/packaging/datadog-agent/win32/install_files/datadog_win32.conf"
        else:
            config_example_file = "https://github.com/DataDog/dd-agent/blob/master/datadog.conf.example"

        sys.stderr.write(
            """Python logging config is no longer supported and will be ignored.
            To configure logging, update the logging portion of 'datadog.conf' to match:
             '%s'.
             """ % config_example_file)

    for option in logging_config:
        if config.has_option('Main', option):
            logging_config[option] = config.get('Main', option)

    levels = {
        'CRITICAL': logging.CRITICAL,
        'DEBUG': logging.DEBUG,
        'ERROR': logging.ERROR,
        'FATAL': logging.FATAL,
        'INFO': logging.INFO,
        'WARN': logging.WARN,
        'WARNING': logging.WARNING,
    }
    if config.has_option('Main', 'log_level'):
        logging_config['log_level'] = levels.get(
            config.get('Main', 'log_level'))

    if config.has_option('Main', 'log_to_syslog'):
        logging_config['log_to_syslog'] = config.get(
            'Main', 'log_to_syslog').strip().lower() in ['yes', 'true', 1]

    if config.has_option('Main', 'log_to_event_viewer'):
        logging_config['log_to_event_viewer'] = config.get(
            'Main',
            'log_to_event_viewer').strip().lower() in ['yes', 'true', 1]

    if config.has_option('Main', 'syslog_host'):
        host = config.get('Main', 'syslog_host').strip()
        if host:
            logging_config['syslog_host'] = host
        else:
            logging_config['syslog_host'] = None

    if config.has_option('Main', 'syslog_port'):
        port = config.get('Main', 'syslog_port').strip()
        try:
            logging_config['syslog_port'] = int(port)
        except Exception:
            logging_config['syslog_port'] = None

    if config.has_option('Main', 'disable_file_logging'):
        logging_config['disable_file_logging'] = config.get(
            'Main',
            'disable_file_logging').strip().lower() in ['yes', 'true', 1]
    else:
        logging_config['disable_file_logging'] = False

    return logging_config
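A [Main] fragment exercising the logging options read by this function (illustrative values; log_level must be one of the names in the levels table above):

[Main]
log_level: INFO
log_to_syslog: yes
syslog_host: 127.0.0.1
syslog_port: 514

An unparseable syslog_port falls back to None, as handled above.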