Example #1
def sync(**cli_options):

    # Just for process output
    with open(get_resource("logging.yml")) as log_cfg:
        logging.config.dictConfig(yaml.safe_load(log_cfg))

    if cli_options['config_path']:
        props_filename = cli_options['config_path']
        working_dir = os.path.dirname(props_filename)
    else:
        props_filename = 'auto_sync.ini'
        working_dir = "."

    if not os.path.exists(props_filename):
        raise IOError(
            "Error - file '{}' cannot be found.  Please check path or run 'auto_ust.exe generate' "
            "to create a default version".format(props_filename))

    # Load the default config and merge the user values into it
    # We need to determine if there is an entry specifying the
    # folder for yml files and set it
    config = ConfigLoader.load(props_filename)
    working_dir = os.path.abspath(working_dir)
    sync_script = config.get_config('sync').require('sync_script')
    if not os.path.isabs(sync_script):
        sync_script = os.path.abspath(os.path.join(working_dir, sync_script))
    if not os.path.exists(sync_script):
        raise IOError(
            "Error - file '{}' cannot be found.  Please check path or run 'auto_ust.exe generate' "
            "to create a default version".format(props_filename))

    sync_folder = config.get_config('sync').get('sync_folder', working_dir)

    if not os.path.isabs(sync_folder):
        sync_folder = os.path.abspath(os.path.join(working_dir, sync_folder))

    if not os.path.exists(sync_folder):
        raise IOError(
            "Error - specified folder '{}' cannot be found.".format(props_filename))

    # Set the directory and read the sources
    config.set_source_dir(sync_folder)
    sync_config = config.get_config("sync")

    # Output folder for sync logs
    log_folder = os.path.join(working_dir, sync_config.get("log_folder", "logs"))
    os.makedirs(log_folder, exist_ok=True)

    # The "base" config into which sync config gets merged
    template_config = config.get_resource_config().merge_with(sync_config.values)
    template_config.set_value("log_folder", os.path.abspath(log_folder))
    if cli_options['args']:
        template_config.set_value("default_args", cli_options['args'])

    # Execute (single thread for now)
    for config in read_file(sync_script):
        sync = Sync(config)
        w = Worker(sync, template_config)
        w.run()
Example #2
def load_event_service_info():
    """Loads Event Service information

        Loads DeliveryRetryAttempts and DeliveryRetryIntervalSeconds
        from CONFIG file and store it in a global var.

        Exceptions:
            ValueError: DeliveryRetryAttempts and
            DeliveryRetryIntervalSeconds must be integers greater than zero.
    """
    app_config = config.get_config()
    event_service = dict(app_config.items("event_service"))

    try:
        delivery_retry_attempts = \
            int(event_service["DeliveryRetryAttempts"])
        delivery_retry_interval = \
            int(event_service["DeliveryRetryIntervalSeconds"])

        if delivery_retry_attempts <= 0 or delivery_retry_interval <= 0:
            raise OneViewRedfishInvalidAttributeValueException(
                "DeliveryRetryAttempts and DeliveryRetryIntervalSeconds "
                "must be an integer greater than zero.")
    except ValueError:
        raise OneViewRedfishInvalidAttributeValueException(
            "DeliveryRetryAttempts and DeliveryRetryIntervalSeconds "
            "must be valid integers.")

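    # Store the validated values as module-level globals for use elsewhere in the service.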
    globals()['delivery_retry_attempts'] = delivery_retry_attempts
    globals()['delivery_retry_interval'] = delivery_retry_interval
Example #3
def main():
    p = create_arg_parser()
    args = p.parse_args()
    conf = config.get_config(args)
    config.configure_logging(args, conf)
    def_args = [args, conf]
    def_kwargs = {}
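    # Dispatch table: 'f' is the handler to call, 'a' its positional args, 'kw' its keyword args.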
    known_commands = {
        'parse': {
            'f': parse.main,
            'a': def_args,
            'kw': def_kwargs
        },
        'plot': {
            'f': plot.main,
            'a': def_args,
            'kw': def_kwargs
        },
        'simulate': {
            'f': simulate.main,
            'a': def_args,
            'kw': def_kwargs
        },
        'statistics': {
            'f': statistics.main,
            'a': def_args,
            'kw': def_kwargs
        },
    }
    if args.command not in known_commands:
        p.print_help()
        return
    rand.init(args.seed)
    c = known_commands[args.command]
    exit(c['f'](*c['a'], **c['kw']))
Example #4
def setup_logging(path='logging.json', default_level=logging.INFO):
    detect_result = detect_file(path)
    json_path = detect_result["file"]

    if os.path.exists(json_path):

        real_path = ''
        base_config = config.get_config()
        base_conf = base_config["bases"]
        env = base_config["env"]

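        # Prefer the configured log_path (only when /data exists); otherwise fall back to <server_path>/logs.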
        if base_conf.get('log_path') and os.path.exists('/data'):
            real_path = base_conf['log_path'] + '/' + base_conf["server"][
                "name"] + '/' + env
            if not os.path.exists(real_path):
                try:
                    os.makedirs(real_path)
                except:
                    real_path = ''
        if not real_path:
            real_path = detect_result['server_path'] + '/logs'

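        # Load the JSON logging config and point each file handler's filename into real_path.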
        with open(json_path, 'rt', encoding="utf-8") as f:
            log_config = json.load(f)
            for k, v in log_config.items():
                if k != 'handlers':
                    continue
                for k1, v1 in v.items():
                    if 'filename' in v1:
                        v1['filename'] = os.path.join(real_path,
                                                      v1['filename'])
        logging.config.dictConfig(log_config)
    else:
        logging.basicConfig(level=default_level)
Example #5
def generate_certificate(dir_name, file_name, key_length, key_type="rsa"):
    """Create self-signed cert and key files

        Args:
            dir_name: name of the directory to store the files
            file_name: name of the files that will be created. It will append
                .crt to certificate file and .key to key file
            key_length: key length in bits
            key_type: crypto type: RSA or DSA; defaults to RSA
        Returns:
            Nothing
        Exceptions:
            Raise exceptions on error
    """

    app_config = config.get_config()
    private_key = OpenSSL.crypto.PKey()
    if key_type == "rsa":
        private_key.generate_key(OpenSSL.crypto.TYPE_RSA, key_length)
    elif key_type == "dsa":
        private_key.generate_key(OpenSSL.crypto.TYPE_DSA, key_length)
    else:
        message = "Invalid key_type"
        logging.error(message)
        raise OneViewRedfishInvalidAttributeValueException(message)

    if not app_config.has_option("ssl-cert-defaults", "commonName"):
        app_config["ssl-cert-defaults"]["commonName"] = get_ip()

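    # Build a self-signed X.509 certificate; subject fields come from the ssl-cert-defaults section.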
    cert = OpenSSL.crypto.X509()
    cert_subject = cert.get_subject()

    cert_defaults = dict(app_config.items("ssl-cert-defaults"))

    for key, value in cert_defaults.items():
        setattr(cert_subject, key, value)

    cert.set_serial_number(1)
    cert.gmtime_adj_notBefore(0)
    cert.gmtime_adj_notAfter(10 * 365 * 24 * 60 * 60)
    cert.set_issuer(cert.get_subject())
    cert.set_pubkey(private_key)
    cert.sign(private_key, "sha1")

    # Save Files
    with open(os.path.join(dir_name, file_name + ".crt"), "wt") as f:
        f.write(
            OpenSSL.crypto.dump_certificate(OpenSSL.crypto.FILETYPE_PEM,
                                            cert).decode("UTF-8"))
    with open(os.path.join(dir_name, file_name + ".key"), "wt") as f:
        f.write(
            OpenSSL.crypto.dump_privatekey(OpenSSL.crypto.FILETYPE_PEM,
                                           private_key).decode("UTF-8"))
Example #6
File: main.py, Project: corka149/iot_server
def configure_database():
    """Configures everything around the database on the startup."""
    host = config.get_config("database.host")

    # Test profile uses mock mongo db
    if os.getenv("IOT_SERVER_PROFILE", "test") == "test":
        mongoengine.connect("iot", host=host)
    else:
        port = config.get_config("database.port")
        username = config.get_config("database.username")
        password = config.get_config("database.password")
        authentication_source = config.get_config(
            "database.authentication_source")
        mongoengine.connect(
            "iot",
            host=host,
            port=port,
            username=username,
            authentication_source=authentication_source,
            password=password,
        )
Example #7
    def load_settings(self, startup=False):
        # Logging
        # Delete old log files if user set to do so
        if startup and config.get_key('logging', 'persistent') == 'False':
            log_file = __directory__ / '../default.log'
            if os.path.exists(log_file):
                os.remove(log_file)

        logging.config.fileConfig(config.get_config('logging'),
                                  disable_existing_loggers=False)

        # PyQtGraph
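        # Config values are stored as strings, so 'None'/'True' need explicit conversion.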
        value = config.get_key('pyqtgraph', 'background')
        if value == 'None':
            pg.setConfigOption('background', None)

        else:
            pg.setConfigOption('background', value)

        value = config.get_key('pyqtgraph', 'foreground')
        if value == 'None':
            pg.setConfigOption('foreground', None)

        else:
            pg.setConfigOption('foreground', value)

        value = config.get_key('pyqtgraph', 'antialias')
        if value == 'True':
            pg.setConfigOption('antialias', True)

        else:
            pg.setConfigOption('antialias', False)

        value = config.get_key('pyqtgraph', 'imageAxisOrder')
        pg.setConfigOption('imageAxisOrder', value)

        self.setFont(
            QFont(config.get_key('font', 'font'),
                  int(config.get_key('font', 'size')), QFont.Normal))

        logging.getLogger('kmap').debug('Settings loaded successfully.')

        # MatPlotlib
        path = config.get_key('paths', 'matplotlib')
        if path != 'None':
            path = __directory__ / path
            plt.rcParams['savefig.directory'] = str(path)
Example #8
def main():
    'Parse args, then start reading queue forever.'
    possible_qnames = ['transfer', 'submit']
    parser = argparse.ArgumentParser(
        description='Data Queue service',
        epilog='EXAMPLE: %(prog)s --loglevel DEBUG &')

    #!parser.add_argument('--cfg',
    #!                    help='Configuration file (json format)',
    #!                    type=argparse.FileType('r'))
    parser.add_argument('--logconf',
                        help='Logging configuration file (YAML format)',
                        default='/etc/tada/pop.yaml',
                        type=argparse.FileType('r'))
    parser.add_argument('--queue',
                        '-q',
                        choices=possible_qnames,
                        help='Name of queue to pop from. Must be in cfg file.')

    parser.add_argument(
        '--loglevel',
        help='Kind of diagnostic output',
        choices=['CRITICAL', 'ERROR', 'WARNING', 'INFO', 'DEBUG'],
        default='WARNING')
    args = parser.parse_args()

    log_level = getattr(logging, args.loglevel.upper(), None)
    if not isinstance(log_level, int):
        parser.error('Invalid log level: %s' % args.loglevel)
    logging.basicConfig(level=log_level,
                        format='%(levelname)s %(message)s',
                        datefmt='%m-%d %H:%M')
    logging.debug('\nDebug output is enabled!!')

    logDict = yaml.safe_load(args.logconf)
    print('logDict={}'.format(logDict), flush=True)
    logging.config.dictConfig(logDict)
    logging.getLogger().setLevel(log_level)

    ###########################################################################

    #!qcfg, dirs = config.get_config(possible_qnames)
    qcfg = config.get_config()
    #!du.save_pid(sys.argv[0], piddir=qcfg['dirs']['run_dir'])

    process_queue_forever(args.queue, qcfg)
Example #9
def status():
    liveconfig = shelve.open(config.systemconfig['live_config_file'], writeback=True)
    perf_filename = liveconfig['data_dir'] + '/vspheredatacollection.data.gz'
    inv_filename = liveconfig['data_dir'] + '/vsphereinventory.gz'
    debug_filename = "./debug.log"

    perf_size = 0
    inv_size = 0

    try:
        perf_size = os.path.getsize(perf_filename)
    except:
        perf_size = "NotFound"

    try:
        inv_size = os.path.getsize(inv_filename)
    except:
        inv_size = "NotFound"

    try:
        debug_size = sizeof_fmt(os.path.getsize(debug_filename))
    except:
        debug_size = "NotFound"

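    # Rough projection of final data size: bytes per completed run times the configured run count.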
    perf_estimate = 0
    try:
        perf_estimate = (perf_size / int(r.get("runs_completed"))) * int(config.get_config('run_count'))
    except:
        pass


    rendered = render_template('status.html',
                               jobs=sched.get_jobs(),
                               datafile_name=perf_filename,
                               datafile_size=sizeof_fmt(perf_size),
                               inventory_name=inv_filename,
                               inventory_size=sizeof_fmt(inv_size),
                               debug_name=debug_filename,
                               debug_size=debug_size,
                               runs=liveconfig['runs_completed'],
                               runs_target=liveconfig['run_count'],
                               perf_estimate=sizeof_fmt(perf_estimate))
    liveconfig.close()
    return rendered
Example #10
def main():
    'Parse args, then start reading queue forever.'
    possible_qnames = ['transfer', 'submit']
    parser = argparse.ArgumentParser(
        description='Data Queue service',
        epilog='EXAMPLE: %(prog)s --loglevel DEBUG &'
        )

    #!parser.add_argument('--cfg',
    #!                    help='Configuration file (json format)',
    #!                    type=argparse.FileType('r'))
    parser.add_argument('--logconf',
                        help='Logging configuration file (YAML format)',
                        default='/etc/tada/pop.yaml',
                        type=argparse.FileType('r'))
    parser.add_argument('--queue', '-q',
                        choices=possible_qnames,
                        help='Name of queue to pop from. Must be in cfg file.')

    parser.add_argument('--loglevel',
                        help='Kind of diagnostic output',
                        choices=['CRITICAL', 'ERROR', 'WARNING',
                                 'INFO', 'DEBUG'],
                        default='WARNING')
    args = parser.parse_args()

    log_level = getattr(logging, args.loglevel.upper(), None)
    if not isinstance(log_level, int):
        parser.error('Invalid log level: %s' % args.loglevel)
    logging.basicConfig(level=log_level,
                        format='%(levelname)s %(message)s',
                        datefmt='%m-%d %H:%M')
    logging.debug('\nDebug output is enabled!!')

    logDict = yaml.safe_load(args.logconf)
    print('logDict={}'.format(logDict), flush=True)
    logging.config.dictConfig(logDict)
    logging.getLogger().setLevel(log_level)

    ###########################################################################

    #!qcfg, dirs = config.get_config(possible_qnames)
    qcfg = config.get_config()
    #!du.save_pid(sys.argv[0], piddir=qcfg['dirs']['run_dir'])
    process_queue_forever(args.queue, qcfg)
Example #11
def get_ip():
    """Tries to detect default route IP Address"""
    app_config = config.get_config()
    ov_ip = app_config["oneview_config"]["ip"]
    if ov_ip:
        host_ip = ov_ip
    else:
        host_ip = "8.8.8.8"

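    # connect() on a UDP socket sends no packets; it only selects the outgoing interface,
    # so getsockname() reveals the local IP used to reach the target address.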
    s = socket.socket(type=socket.SOCK_DGRAM)
    try:
        s.connect((host_ip, 1))
        ip = s.getsockname()[0]
    except Exception as e:
        logging.exception(e)
        ip = "127.0.0.1"
    finally:
        s.close()
    return ip
Example #12
def get_config() -> Dict[str, Union[str, bool]]:
    """Get the application configuration."""
    config.init(context.config.get_main_option("app.cfg"))
    settings: Dict[str, Union[str, bool]] = {}
    settings.update(config.get_config())
    alembic_name = context.config.get_main_option("type")
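    # Non-main alembic environments read their schema from 'schema_<name>'; 'main' uses plain 'schema'.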
    schema_config_name = "schema{}".format(
        f"_{alembic_name}" if alembic_name != "main" else "")
    script_location = context.config.get_main_option("script_location")
    version_table = context.config.get_main_option("version_table")
    version_locations = context.config.get_main_option("version_locations")
    assert script_location
    assert version_table
    assert version_locations
    settings.update({
        "script_location": script_location,
        "version_table": version_table,
        "version_locations": version_locations,
        "version_table_schema": config[schema_config_name],
    })
    return settings
Example #13
    def load_settings(self, startup=False):
        logging.config.fileConfig(config.get_config(
            'logging'), disable_existing_loggers=False)

        # PyQtGraph
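        # Config values are stored as strings, so 'None'/'True' need explicit conversion.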
        value = config.get_key('pyqtgraph', 'background')
        if value == 'None':
            pg.setConfigOption('background', None)

        else:
            pg.setConfigOption('background', value)

        value = config.get_key('pyqtgraph', 'foreground')
        if value == 'None':
            pg.setConfigOption('foreground', None)

        else:
            pg.setConfigOption('foreground', value)

        value = config.get_key('pyqtgraph', 'antialias')
        if value == 'True':
            pg.setConfigOption('antialias', True)

        else:
            pg.setConfigOption('antialias', False)

        value = config.get_key('pyqtgraph', 'imageAxisOrder')
        pg.setConfigOption('imageAxisOrder', value)

        self.setFont(QFont(config.get_key('font', 'font'), int(
            config.get_key('font', 'size')), QFont.Normal))

        logging.getLogger('kmap').debug('Settings loaded successfully.')

        # MatPlotlib
        path = config.get_key('paths', 'matplotlib')
        if path != 'None':
            path = __directory__ / path
            plt.rcParams['savefig.directory'] = str(path)
Example #14
def main():
    global logger

    setup_logging()

    logger = logging.getLogger(__name__)

    logger.info('Starting script.')

    # config.settings

    if config.get_config():

        new_book_table = getnewbooks()

        if new_book_table:
            logger.info("We found " + str(len(new_book_table)) + ' new books.')
            buildnewsletter(new_book_table)
        else:
            logger.info("We didn't find any books.")
    else:
        logger.error("No configuration loaded.")

    logger.info('Finishing script.')
Example #15
def main():
    p = create_arg_parser()
    args = p.parse_args()
    conf = config.get_config(args)
    config.configure_logging(args, conf)
    def_args = [args, conf]
    def_kwargs = {}
    known_commands = {
        'client': {
            'f': client.main,
            'a': def_args,
            'kw': def_kwargs
        },
        'server': {
            'f': server.main,
            'a': def_args,
            'kw': def_kwargs
        },
    }
    if args.command not in known_commands:
        p.print_help()
        return
    c = known_commands[args.command]
    exit(c['f'](*c['a'], **c['kw']))
Example #16
def main():
    'Parse command line (a mini-interpreter) and do the work.'
    possible_qnames = ['transfer', 'submit']
    parser = argparse.ArgumentParser(
        description='Modify or display the data queue',
        epilog='EXAMPLE: %(prog)s --summary'
    )
    parser.add_argument('--queue', '-q',
                        default='submit',
                        choices=possible_qnames,
                        help='Name of queue to pop from. Must be in cfg file.')

    parser.add_argument('--version', action='version', version='%(prog)s 1.0.2')
    parser.add_argument('--summary', '-s',
                        help='Show summary of queue contents.',
                        action='store_true')
    parser.add_argument('--info', '-i', help='Show info about Redis server.',
                        action='store_true')
    parser.add_argument('--list', '-l',
                        help='List queue',
                        choices=['active', 'inactive', 'records'])
    parser.add_argument('--action', '-a',
                        help='Turn on/off running actions on queue records.',
                        default=None,
                        choices=['on', 'off'])
    parser.add_argument('--read', '-r',
                        help='Turn on/off reading socket and pushing to queue.',
                        default=None,
                        choices=['on', 'off'])
    parser.add_argument('--clear', help='Delete queue related data from DB',
                        action='store_true')

    parser.add_argument('--dump',
                        help='Dump copy of queue into this file',
                        type=argparse.FileType('w'))
    parser.add_argument('--push',
                        help='File of data records to load into queue.'
                        +' Multiple allowed.  Use "-" for stdin',
                        action='append')
    parser.add_argument('--pushstr',
                        help='A single string to load into queue.'
                        +' Space delimited string must contain at least'
                        +' "checksum filename".')

    parser.add_argument('--advance',
                        help='Move records to end of queue.',
                        nargs=2)

    parser.add_argument('--deactivate',
                        help='Move selected records to INACTIVE',
                        nargs=2)
    parser.add_argument('--activate',
                        help='Move selected records to ACTIVE',
                        nargs=2)
    parser.add_argument('--redo',
                        help='Move ALL records to ACTIVE',
                        action='store_true'
                        )

    parser.add_argument('--loglevel',
                        help='Kind of diagnostic output',
                        choices=['CRITICAL', 'ERROR', 'WARNING', 'INFO', 'DEBUG'],
                        default='WARNING',
    )
    args = parser.parse_args()


    numeric_level = getattr(logging, args.loglevel.upper(), None)
    if not isinstance(numeric_level, int):
        parser.error('Invalid log level: %s' % args.loglevel)
    logging.config.dictConfig(LOG_SETTINGS)


    logging.debug('Debug output is enabled!!')

    ############################################################################

    #!qcfg, dirs = config.get_config(possible_qnames)

    qcfg = config.get_config()
    qname = args.queue
    #!max_qsize = qcfg.get('maximum_queue_size',11000)
    #!host = qcfg['dq_host']
    #!port = qcfg['redis_port']
    max_qsize = qcfg['queues'][qname]['maximum_queue_size']
    host = qcfg['queues'][qname]['dq_host']
    port = qcfg['queues'][qname]['dq_port']

    if len(sys.argv) == 1:
        parser.print_help()
        sys.exit(1)

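    # Open the Redis connection and apply each requested queue operation in turn.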
    red = ru.redis_protocol()

    if args.clear:
        clear_db(red)

    if args.action is not None:
        red.set(actionP, args.action)
        red.lpush(dummy, 'ignore')
        if args.read is not None:
            red.set(readP, args.read)


    if args.list:
        list_queue(red, args.list)

    if args.dump:
        dump_queue(red, args.dump)

    if args.push:
        push_queue(host, port, args.push, max_qsize)
    if args.pushstr:
        push_string(red, args.pushstr)

    if args.advance:
        advance_range(red, args.advance[0], args.advance[1])

    if args.deactivate:
        deactivate_range(red, args.deactivate[0], args.deactivate[1])

    if args.activate:
        activate_range(red, args.activate[0], args.activate[1])

    if args.redo:
        activate_all(red)

    if args.info:
        info(red)
        if args.summary:
            summary(red)

    if args.summary:
        summary(red)

    red.save()
Example #17
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
import logging
import logging.config

from dao.common import config

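# Register the options this module depends on before reading the global configuration.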
opts = [
    config.BoolOpt('common', 'debug', True),
    config.StrOpt('common', 'log_config', '')
]
config.register(opts)
CONF = config.get_config()


def getLogger(name):
    """getting logger"""
    return logging.getLogger(name)


def setup(app_name):
    defaults = dict()
    defaults['app_name'] = app_name
    log_level = 'DEBUG' if CONF.common.debug else 'INFO'
    defaults['log_level'] = log_level
    path = os.path.join(CONF.common.log_config)
    if path:
        logging.config.fileConfig(path, defaults=defaults)
Example #18
import ConfigParser
import logging.config
import config

try:
    logging.config.fileConfig(config.get_config('logging.config.file'))
except ConfigParser.NoSectionError, e:
    pass

class Log:

    def __init__ (self, instance, component=''):
        self._logger = logging.getLogger(instance)
        self.__component = component

    def debug (self, message):
        try:
            self._logger.debug('[%s] %s' % (self.__component, message))
        except:
            pass

    def error (self, message, component=''):
        try:
            self._logger.error('[%s] %s' % (self.__component, message.replace('\n', ' ')))
        except:
            pass

    def info (self, message, component=''):
        try:
            self._logger.info('[%s] %s' % (self.__component, message))
        except:
            pass
Example #19
    # SECURITY WARNING: don't run with debug turned on in production!
    DEBUG = True

    # SECURITY WARNING: keep the secret key used in production secret!
    SECRET_KEY = '&j8au*lf%i=d2niee^qlza!d3=$(5c2h^)jt(r(m!u%8+2@wlm'
    ALLOWED_HOSTS = []
    LOG_LEVEL = 'DEBUG'
else:
    DEBUG = False
    ALLOWED_HOSTS = ["*"]
    LOG_LEVEL = 'INFO'

# Get config
config = Config()
CONFIG = config.get_config()

# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))

# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/2.2/howto/deployment/checklist/

DEFAULT_FROM_EMAIL = '*****@*****.**'

# Application definition
INSTALLED_APPS = [
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
Example #20
#! /usr/bin/env python3

import logging
import logging.config
import os
import yaml

from datetime import datetime
from time import localtime, strftime
from sys import platform

import config
import dropfolder_check_csv as dfc
import permissions_fix as permissions

config = config.get_config()

script_root = config['paths']['script_root']
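# One drop folder per storage share, each built from the same relative drop_folder path.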
drop_folders = [
    os.path.join(config['paths']['mac_root_path']['storage01'],
                 config['paths']['drop_folder']),
    os.path.join(config['paths']['mac_root_path']['storage02'],
                 config['paths']['drop_folder']),
    os.path.join(config['paths']['mac_root_path']['storage03'],
                 config['paths']['drop_folder']),
    os.path.join(config['paths']['mac_root_path']['storage04'],
                 config['paths']['drop_folder']),
]

logger = logging.getLogger(__name__)
Example #21
import logging.config
import os

from keylime import config


LOG_TO_FILE = ['registrar', 'provider_registrar', 'cloudverifier']
LOG_TO_STREAM = ['tenant_webapp']
LOGDIR = os.getenv('KEYLIME_LOGDIR', '/var/log/keylime')
# not clear that this works right.  console logging may not work
if not config.REQUIRE_ROOT:
    LOGSTREAM = './keylime-stream.log'
else:
    LOGSTREAM = LOGDIR + '/keylime-stream.log'

logging.config.fileConfig(config.get_config())


def set_log_func(loglevel, logger):
    log_func = logger.info

    if loglevel == logging.CRITICAL:
        log_func = logger.critical
    elif loglevel == logging.ERROR:
        log_func = logger.error
    elif loglevel == logging.WARNING:
        log_func = logger.warning
    elif loglevel == logging.INFO:
        log_func = logger.info
    elif loglevel == logging.DEBUG:
        log_func = logger.debug
Example #22
File: log.py, Project: cash2one/dmp
#!/usr/bin/env python
# -*- encoding: UTF-8 -*-
'''
Logging wrapper.
'''
import logging
import logging.config
import os
import config


logger = None
conf = config.get_config()
__all__ = ['get_logger']


def get_logger():
    global logger

    if logger is None:
        # The config stores only the folder name
        log_path = conf.get('log', 'dir')
        cur_path = os.path.abspath(os.path.dirname(__file__))
        log_path = os.path.join(cur_path, '..', log_path)
        if not os.path.exists(log_path):
            os.mkdir(log_path)
        filename = conf.get('log', 'file')
        filename = os.path.join(log_path, filename)

        file_handler = logging.FileHandler(filename)
        file_handler.setLevel(logging.INFO)
Example #23
def main():
    'Parse command line (a mini-interpreter) and do the work.'
    possible_qnames = ['transfer', 'submit']
    parser = argparse.ArgumentParser(
        description='Modify or display the data queue',
        epilog='EXAMPLE: %(prog)s --summary'
    )
    parser.add_argument('--queue', '-q',
                        default='submit',
                        choices=possible_qnames,
                        help='Name of queue to pop from. Must be in cfg file.')

    parser.add_argument('--version', action='version', version='%(prog)s 1.0.2')
    parser.add_argument('--summary', '-s',
                        help='Show summary of queue contents.',
                        action='store_true')
    parser.add_argument('--info', '-i', help='Show info about Redis server.',
                        action='store_true')
    parser.add_argument('--list', '-l',
                        help='List queue',
                        choices=['active', 'inactive', 'records'])
    parser.add_argument('--action', '-a',
                        help='Turn on/off running actions on queue records.',
                        default=None,
                        choices=['on', 'off'])
    parser.add_argument('--read', '-r',
                        help='Turn on/off reading socket and pushing to queue.',
                        default=None,
                        choices=['on', 'off'])
    parser.add_argument('--clear', help='Delete queue related data from DB',
                        action='store_true')

    parser.add_argument('--dump',
                        help='Dump copy of queue into this file',
                        type=argparse.FileType('w'))
    parser.add_argument('--push',
                        help='File of data records to load into queue.'
                        +' Multiple allowed.  Use "-" for stdin',
                        action='append')
    parser.add_argument('--pushstr',
                        help='A single string to load into queue.'
                        +' Space delimited string must contain at least'
                        +' "checksum filename".')

    parser.add_argument('--advance',
                        help='Move records to end of queue.',
                        nargs=2)

    parser.add_argument('--deactivate',
                        help='Move selected records to INACTIVE',
                        nargs=2)
    parser.add_argument('--activate',
                        help='Move selected records to ACTIVE',
                        nargs=2)
    parser.add_argument('--redo',
                        help='Move ALL records to ACTIVE',
                        action='store_true'
                        )

    parser.add_argument('--loglevel',
                        help='Kind of diagnostic output',
                        choices=['CRITICAL', 'ERROR', 'WARNING', 'INFO', 'DEBUG'],
                        default='WARNING',
    )
    args = parser.parse_args()


    numeric_level = getattr(logging, args.loglevel.upper(), None)
    if not isinstance(numeric_level, int):
        parser.error('Invalid log level: %s' % args.loglevel)
    logging.config.dictConfig(LOG_SETTINGS)


    logging.debug('Debug output is enabled!!')

    ############################################################################

    #!qcfg, dirs = config.get_config(possible_qnames)

    qcfg = config.get_config()
    qname = args.queue
    #!max_qsize = qcfg.get('maximum_queue_size',11000)
    #!host = qcfg['dq_host']
    #!port = qcfg['redis_port']
    max_qsize = qcfg['queues'][qname]['maximum_queue_size']
    host = qcfg['queues'][qname]['dq_host']
    port = qcfg['queues'][qname]['dq_port']

    if len(sys.argv) == 1:
        parser.print_help()
        sys.exit(1)

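    # Open the Redis connection and apply each requested queue operation in turn.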
    red = ru.redis_protocol()

    if args.clear:
        clear_db(red)

    if args.action is not None:
        red.set(actionP, args.action)
        red.lpush(dummy, 'ignore')
        if args.read is not None:
            red.set(readP, args.read)


    if args.list:
        list_queue(red, args.list)

    if args.dump:
        dump_queue(red, args.dump)

    if args.push:
        push_queue(host, port, args.push, max_qsize)
    if args.pushstr:
        push_string(red, args.pushstr)

    if args.advance:
        advance_range(red, args.advance[0], args.advance[1])

    if args.deactivate:
        deactivate_range(red, args.deactivate[0], args.deactivate[1])

    if args.activate:
        activate_range(red, args.activate[0], args.activate[1])

    if args.redo:
        activate_all(red)

    if args.info:
        info(red)
        if args.summary:
            summary(red)

    if args.summary:
        summary(red)

    red.save()
Example #24
def info(red):
    qcfg = config.get_config()
    print('config=',pformat(qcfg))
    pprint.pprint(red.info())
Example #25
def info(red):
    qcfg = config.get_config()
    print('config=',pformat(qcfg))
    pprint.pprint(red.info())